diff --git a/FE_testing.txt b/FE_testing.txt
new file mode 100644
index 0000000000000000000000000000000000000000..7ea18826964781f78c5684f561a8d4bf22ce0607
--- /dev/null
+++ b/FE_testing.txt
@@ -0,0 +1,13 @@
+Dependencies:
+pytest
+pytest-cov (https://pypi.org/project/pytest-cov/ ; https://pytest-cov.readthedocs.io/en/latest/)
+coverage
+
+# basic command to check the coverage of the tests
+pytest --cov=PATH_TO_PROJECT_SRC PATH_TO_TEST_SRC
+
+# do not run tests with a specific marker
+pytest TestTools.py -m "not msuser_pw"
+
+# run only tests with a specific marker
+pytest TestTools.py -m "msuser_pw"
\ No newline at end of file
diff --git a/python/compilejob.temp b/_templates/compilejob.temp
similarity index 100%
rename from python/compilejob.temp
rename to _templates/compilejob.temp
diff --git a/python/job.temp b/_templates/job.temp
similarity index 100%
rename from python/job.temp
rename to _templates/job.temp
diff --git a/python/ControlFile.py b/python/ControlFile.py
index 257e072543b77281134b4dcf5123bd1d3dfdb03b..d74c9d83b6886310ec65a93c74714bcf3457d7f7 100644
--- a/python/ControlFile.py
+++ b/python/ControlFile.py
@@ -52,9 +52,12 @@
 # MODULES
 # ------------------------------------------------------------------------------
 import os
+import re
 import sys
 import inspect
+import _config
+
 # ------------------------------------------------------------------------------
 # CLASS
 # ------------------------------------------------------------------------------
@@ -109,7 +112,7 @@ class ControlFile(object):
         self.marsclass = None
         self.stream = None
         self.number = 'OFF'
-        self.expver = None
+        self.expver = '1'
         self.grid = None
         self.area = ''
         self.left = None
@@ -141,16 +144,21 @@ class ControlFile(object):
         self.ectrans = 0
         self.inputdir = '../work'
         self.outputdir = self.inputdir
-        self.ecmwfdatadir = None
-        self.exedir = None
+        self.ecmwfdatadir = _config.PATH_FLEXEXTRACT_DIR
+        self.exedir = _config.PATH_FORTRAN_SRC
         self.flexpart_root_scripts = None
-        self.makefile = None
+        self.makefile = 'Makefile.gfortran'
         self.destination = None
         self.gateway = None
         self.ecuid = None
         self.ecgid = None
         self.install_target = None
         self.debug = 0
+        self.request = 0
+
+        self.logicals = ['gauss', 'omega', 'omegadiff', 'eta', 'etadiff',
+                         'dpdeta', 'cwc', 'wrf', 'grib2flexpart', 'ecstorage',
+                         'ectrans', 'debug', 'request']
         self.__read_controlfile__()
@@ -171,7 +179,8 @@ class ControlFile(object):
         from tools import my_error
         # read whole CONTROL file
-        with open(self.controlfile) as f:
+        with open(os.path.join(_config.PATH_CONTROLFILES,
+                               self.controlfile)) as f:
             fdata = f.read().split('\n')
         # go through every line and store parameter
@@ -221,13 +230,6 @@ class ControlFile(object):
             else:
                 pass
-        # script directory
-        self.ecmwfdatadir = os.path.dirname(os.path.abspath(inspect.getfile(
-            inspect.currentframe()))) + '/../'
-
-        # Fortran source directory
-        self.exedir = self.ecmwfdatadir + 'src/'
-
         return
     def __str__(self):
@@ -309,7 +311,7 @@ class ControlFile(object):
         return
-    def check_conditions(self):
+    def check_conditions(self, queue):
         '''
         @Description:
             Checks a couple of necessary attributes and conditions,
@@ -320,6 +322,11 @@ class ControlFile(object):
             self: instance of ControlFile class
                 Description see class documentation.
+            queue: string
+                Name of the queue if submitted to the ECMWF servers.
+                Used to check if ecuid, ecgid, gateway and destination
+                are set correctly and are not empty.
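The marker-based selection in FE_testing.txt above only works if the tests in TestTools.py actually carry the msuser_pw marker. A minimal sketch, assuming a test that needs ECMWF credentials; the test body and the marker description are illustrative only:

import pytest

@pytest.mark.msuser_pw
def test_needs_ecmwf_user_password():
    # placeholder for a check that would contact the ECMWF servers;
    # deselected on machines without credentials via -m "not msuser_pw"
    assert True

# Registering the marker (for example in a pytest.ini next to the tests)
# avoids pytest's unknown-marker warning:
# [pytest]
# markers =
#     msuser_pw: tests that need the ECMWF user password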
+ @Return: <nothing> ''' @@ -329,10 +336,10 @@ class ControlFile(object): # check for having at least a starting date # otherwise program is not allowed to run if self.start_date is None: - print 'start_date specified neither in command line nor ' + \ - 'in CONTROL file ' + self.controlfile - print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' + print('start_date specified neither in command line nor \ + in CONTROL file ' + self.controlfile) + print('Try "' + sys.argv[0].split('/')[-1] + + ' -h" to print usage information') sys.exit(1) # retrieve just one day if end_date isn't set @@ -340,41 +347,42 @@ class ControlFile(object): self.end_date = self.start_date # assure consistency of levelist and level - if self.levelist is None: - if self.level is None: - print 'Warning: neither levelist nor level ' + \ - 'specified in CONTROL file' - sys.exit(1) - else: - self.levelist = '1/to/' + self.level + if self.levelist is None and self.level is None: + print('Warning: neither levelist nor level \ + specified in CONTROL file') + sys.exit(1) + elif self.levelist is None and self.level: + self.levelist = '1/to/' + self.level + elif (self.levelist and self.level is None) or \ + (self.levelist[-1] != self.level[-1]): + self.level = self.levelist.split('/')[-1] else: - if 'to' in self.levelist.lower(): - self.level = self.levelist.split('/')[2] - else: - self.level = self.levelist.split('/')[-1] + pass - # if area was provided at command line - # decompse area into its 4 components + # if area was provided (only from commandline) + # decompose area into its 4 components if self.area: - afloat = '.' in self.area - l = self.area.split('/') - if afloat: - for i, item in enumerate(l): - item = str(int(float(item) * 1000)) - self.upper, self.left, self.lower, self.right = l - - # prepare step for correct usage + components = self.area.split('/') + # convert float to integer coordinates + if '.' in self.area: + components = [str(int(float(item) * 1000)) + for i, item in enumerate(components)] + self.upper, self.left, self.lower, self.right = components + + # prepare step list if "/" signs are found if '/' in self.step: - l = self.step.split('/') - if 'to' in self.step.lower(): - if 'by' in self.step.lower(): - ilist = np.arange(int(l[0]), int(l[2]) + 1, int(l[4])) - self.step = ['{:0>3}'.format(i) for i in ilist] - else: - my_error(self.mailfail, self.step + ':\n' + - 'if "to" is used, please use "by" as well') + steps = self.step.split('/') + if 'to' in self.step.lower() and 'by' in self.step.lower(): + ilist = np.arange(int(steps[0]), + int(steps[2]) + 1, + int(steps[4])) + self.step = ['{:0>3}'.format(i) for i in ilist] + elif 'to' in self.step.lower() and 'by' not in self.step.lower(): + my_error(self.mailfail, self.step + ':\n' + + 'if "to" is used in steps parameter, \ + please use "by" as well') else: - self.step = l + self.step = steps # if maxstep wasn't provided # search for it in the "step" parameter @@ -406,14 +414,27 @@ class ControlFile(object): else: self.mailops = [self.mailops] - if not self.gateway or not self.destination or \ + if queue in ['ecgate', 'cca'] and \ + not self.gateway or not self.destination or \ not self.ecuid or not self.ecgid: - print '\nEnvironment variables GATWAY, DESTINATION, ECUID and ' + \ - 'ECGID were not set properly!' - print 'Please check for excistence of file "ECMWF_ENV" in the ' + \ - 'python directory!' 
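The step handling in check_conditions above expands a MARS-style range such as 0/to/12/by/3 into zero-padded hour strings before the retrieval is built. A standalone sketch of that logic with a hypothetical helper name and a worked example:

import numpy as np

def expand_steps(step):
    # mirrors the "to"/"by" branch of ControlFile.check_conditions;
    # np.arange is used here just as in the original code
    parts = step.split('/')
    if 'to' in step.lower() and 'by' in step.lower():
        hours = np.arange(int(parts[0]), int(parts[2]) + 1, int(parts[4]))
        return ['{:0>3}'.format(h) for h in hours]
    return parts

print(expand_steps('0/to/12/by/3'))  # ['000', '003', '006', '009', '012']
print(expand_steps('0/3/6/9/12'))    # ['0', '3', '6', '9', '12']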
+ print('\nEnvironment variables GATEWAY, DESTINATION, ECUID and \ + ECGID were not set properly!') + print('Please check for existence of file "ECMWF_ENV" in the \ + python directory!') sys.exit(1) + if self.request != 0: + marsfile = os.path.join(_config.PATH_RUN_DIR, + _config.FILE_MARS_REQUESTS) + if os.path.isfile(marsfile): + os.remove(marsfile) + + # check all logical variables for data type + # if its a string change to integer + for var in self.logicals: + if not isinstance(getattr(self, var), int): + setattr(self, var, int(getattr(self, var))) + return def check_install_conditions(self): @@ -433,37 +454,34 @@ class ControlFile(object): if self.install_target and \ self.install_target not in ['local', 'ecgate', 'cca']: - print 'ERROR: unknown or missing installation target ' - print 'target: ', self.install_target - print 'please specify correct installation target \ - (local | ecgate | cca)' - print 'use -h or --help for help' + print('ERROR: unknown or missing installation target ') + print('target: ', self.install_target) + print('please specify correct installation target ' + + '(local | ecgate | cca)') + print('use -h or --help for help') sys.exit(1) if self.install_target and self.install_target != 'local': if not self.ecgid or not self.ecuid or \ not self.gateway or not self.destination: - print 'Please enter your ECMWF user id and group id as well as \ - the \nname of the local gateway and the ectrans \ - destination ' - print 'with command line options --ecuid --ecgid \ - --gateway --destination' - print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' - print 'Please consult ecaccess documentation or ECMWF user \ - support for further details' + print('Please enter your ECMWF user id and group id as well ' + + 'as the \nname of the local gateway and the ectrans ' + + 'destination ') + print('with command line options --ecuid --ecgid \ + --gateway --destination') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + print('Please consult ecaccess documentation or ECMWF user \ + support for further details') sys.exit(1) if not self.flexpart_root_scripts: self.flexpart_root_scripts = '${HOME}' else: self.flexpart_root_scripts = self.flexpart_root_scripts - else: + else: # local if not self.flexpart_root_scripts: - self.flexpart_root_scripts = '../' - - if not self.makefile: - self.makefile = 'Makefile.gfortran' + self.flexpart_root_scripts = _config.PATH_FLEXEXTRACT_DIR return diff --git a/python/EcFlexpart.py b/python/EcFlexpart.py index a4d36146370b43c19eaefd6bc602102626c1761b..d202572a81395cdd687136cad529935e02b6b2b8 100644 --- a/python/EcFlexpart.py +++ b/python/EcFlexpart.py @@ -101,6 +101,7 @@ from gribapi import grib_set, grib_index_select, grib_new_from_index, grib_get,\ grib_index_release, grib_index_get # software specific classes and modules from flex_extract +import _config from GribTools import GribTools from tools import init128, to_param_id, silent_remove, product, my_error from MarsRetrieval import MarsRetrieval @@ -203,12 +204,7 @@ class EcFlexpart(object): self.resol = c.resol self.accuracy = c.accuracy self.level = c.level - - if c.levelist: - self.levelist = c.levelist - else: - self.levelist = '1/to/' + c.level - + self.expver = c.expver # for gaussian grid retrieval self.glevelist = '1/to/' + c.level @@ -217,16 +213,6 @@ class EcFlexpart(object): else: self.gaussian = '' - if hasattr(c, 'expver') and c.expver: - self.expver = c.expver - else: - self.expver = '1' - - if hasattr(c, 'number') 
and c.number: - self.number = c.number - else: - self.number = '0' - if 'N' in c.grid: # Gaussian output grid self.grid = c.grid self.area = 'G' @@ -249,7 +235,6 @@ class EcFlexpart(object): # 3) Calculation/Retrieval of omega # 4) Download also data for WRF - # Different grids need different retrievals # SH = Spherical Harmonics, GG = Gaussian Grid, # OG = Output Grid, ML = MultiLevel, SL = SingleLevel @@ -273,34 +258,37 @@ class EcFlexpart(object): self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, self.grid] - if c.gauss == '0' and c.eta == '1': + #if c.gauss == '0' and c.eta == '1': + if not c.gauss and c.eta: # the simplest case self.params['OG__ML'][0] += '/U/V/77' - elif c.gauss == '0' and c.eta == '0': + #elif c.gauss == '0' and c.eta == '0': + elif not c.gauss and not c.eta: # this is not recommended (inaccurate) self.params['OG__ML'][0] += '/U/V' - elif c.gauss == '1' and c.eta == '0': + #elif c.gauss == '1' and c.eta == '0': + elif c.gauss and not c.eta: # this is needed for data before 2008, or for reanalysis data self.params['GG__SL'] = ['Q', 'ML', '1', \ '{}'.format((int(self.resol) + 1) / 2)] self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] else: - print 'Warning: This is a very costly parameter combination, \ - use only for debugging!' + print('Warning: This is a very costly parameter combination, \ + use only for debugging!') self.params['GG__SL'] = ['Q', 'ML', '1', \ '{}'.format((int(self.resol) + 1) / 2)] self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ '{}'.format((int(self.resol) + 1) / 2)] - if hasattr(c, 'omega') and c.omega == '1': + if c.omega: self.params['OG__ML'][0] += '/W' # add cloud water content if necessary - if hasattr(c, 'cwc') and c.cwc == '1': + if c.cwc: self.params['OG__ML'][0] += '/CLWC/CIWC' # add vorticity and geopotential height for WRF if necessary - if hasattr(c, 'wrf') and c.wrf == '1': + if c.wrf: self.params['OG__ML'][0] += '/Z/VO' if '/D' not in self.params['OG__ML'][0]: self.params['OG__ML'][0] += '/D' @@ -366,27 +354,28 @@ class EcFlexpart(object): with open(self.inputdir + '/' + filename, 'w') as f: f.write('&NAMGEN\n') f.write(',\n '.join(['maxl = ' + str(maxl), 'maxb = ' + str(maxb), - 'mlevel = ' + self.level, - 'mlevelist = ' + '"' + self.levelist + '"', - 'mnauf = ' + self.resol, + 'mlevel = ' + str(self.level), + 'mlevelist = ' + '"' + str(self.levelist) + + '"', + 'mnauf = ' + str(self.resol), 'metapar = ' + '77', 'rlo0 = ' + str(area[1]), 'rlo1 = ' + str(area[3]), 'rla0 = ' + str(area[2]), 'rla1 = ' + str(area[0]), - 'momega = ' + c.omega, - 'momegadiff = ' + c.omegadiff, - 'mgauss = ' + c.gauss, - 'msmooth = ' + c.smooth, - 'meta = ' + c.eta, - 'metadiff = ' + c.etadiff, - 'mdpdeta = ' + c.dpdeta])) + 'momega = ' + str(c.omega), + 'momegadiff = ' + str(c.omegadiff), + 'mgauss = ' + str(c.gauss), + 'msmooth = ' + str(c.smooth), + 'meta = ' + str(c.eta), + 'metadiff = ' + str(c.etadiff), + 'mdpdeta = ' + str(c.dpdeta)])) f.write('\n/\n') return - def retrieve(self, server, dates, inputdir='.'): + def retrieve(self, server, dates, request, inputdir='.'): ''' @Description: Finalizing the retrieval information by setting final details @@ -436,7 +425,7 @@ class EcFlexpart(object): if pk == 'OG__SL': pass if pk == 'OG_OROLSM__SL': - if oro is False: + if not oro: mfstream = 'OPER' mftype = 'AN' mftime = '00' @@ -456,7 +445,7 @@ class EcFlexpart(object): gaussian = self.gaussian # ------ on demand path -------------------------------------------------- - if self.basetime is None: + if not self.basetime: MR = 
MarsRetrieval(self.server, marsclass=self.marsclass, stream=mfstream, @@ -476,8 +465,17 @@ class EcFlexpart(object): expver=self.expver, param=pv[0]) - MR.display_info() - MR.data_retrieve() + if request == 0: + MR.display_info() + MR.data_retrieve() + elif request == 1: + MR.print_info() + elif request == 2: + MR.print_info() + MR.display_info() + MR.data_retrieve() + else: + print('Failure') # ------ operational path ------------------------------------------------ else: # check if mars job requests fields beyond basetime. @@ -565,6 +563,7 @@ class EcFlexpart(object): param=pv[0]) MR.display_info() + MR.data_retrieve() # -------------- non flux data ------------------------ else: @@ -661,7 +660,10 @@ class EcFlexpart(object): MR.display_info() MR.data_retrieve() - print "MARS retrieve done... " + if request == 0 or request == 2: + print('MARS retrieve done ... ') + elif request == 1: + print('MARS request printed ...') return @@ -701,20 +703,20 @@ class EcFlexpart(object): ''' - print '\n\nPostprocessing:\n Format: {}\n'.format(c.format) + print('\n\nPostprocessing:\n Format: {}\n'.format(c.format)) - if c.ecapi is False: + if not c.ecapi: print('ecstorage: {}\n ecfsdir: {}\n'. format(c.ecstorage, c.ecfsdir)) - if not hasattr(c, 'gateway'): - c.gateway = os.getenv('GATEWAY') - if not hasattr(c, 'destination'): - c.destination = os.getenv('DESTINATION') + #if not hasattr(c, 'gateway'): + # c.gateway = os.getenv('GATEWAY') + #if not hasattr(c, 'destination'): + # c.destination = os.getenv('DESTINATION') print('ectrans: {}\n gateway: {}\n destination: {}\n ' .format(c.ectrans, c.gateway, c.destination)) - print 'Output filelist: \n' - print self.outputfilelist + print('Output filelist: \n') + print(self.outputfilelist) if c.format.lower() == 'grib2': for ofile in self.outputfilelist: @@ -723,14 +725,14 @@ class EcFlexpart(object): ofile, ofile + '_2']) p = subprocess.check_call(['mv', ofile + '_2', ofile]) - if int(c.ectrans) == 1 and c.ecapi is False: + if c.ectrans and not c.ecapi: for ofile in self.outputfilelist: p = subprocess.check_call(['ectrans', '-overwrite', '-gateway', c.gateway, '-remote', c.destination, '-source', ofile]) #print('ectrans:', p) - if int(c.ecstorage) == 1 and c.ecapi is False: + if c.ecstorage and not c.ecapi: for ofile in self.outputfilelist: p = subprocess.check_call(['ecp', '-o', ofile, os.path.expandvars(c.ecfsdir)]) @@ -741,7 +743,7 @@ class EcFlexpart(object): # prepare environment for the grib2flexpart run # to convert grib to flexpart binary - if c.grib2flexpart == '1': + if c.grib2flexpart: # generate AVAILABLE file # Example of AVAILABLE file data: @@ -857,7 +859,7 @@ class EcFlexpart(object): table128) index_keys = ["date", "time", "step"] - indexfile = c.inputdir + "/date_time_stepRange.idx" + indexfile = os.path.join(c.inputdir, _config.FILE_GRIB_INDEX) silent_remove(indexfile) grib = GribTools(inputfiles.files) # creates new index file @@ -867,7 +869,7 @@ class EcFlexpart(object): index_vals = [] for key in index_keys: index_vals.append(grib_index_get(iid, key)) - print index_vals[-1] + print(index_vals[-1]) # index_vals looks for example like: # index_vals[0]: ('20171106', '20171107', '20171108') ; date # index_vals[1]: ('0', '1200', '1800', '600') ; time @@ -877,9 +879,13 @@ class EcFlexpart(object): '17':None, '19':None, '21':None, '22':None, '20':None} for prod in product(*index_vals): + # e.g. prod = ('20170505', '0', '12') + # ( date ,time, step) + # per date e.g. time = 0, 1200 + # per time e.g. 
step = 3, 6, 9, 12 # flag for Fortran program CONVERT2 and file merging convertFlag = False - print 'current prod: ', prod + print('current prod: ', prod) # e.g. prod = ('20170505', '0', '12') # ( date ,time, step) # per date e.g. time = 0, 600, 1200, 1800 @@ -901,8 +907,9 @@ class EcFlexpart(object): # remove old fort.* files and open new ones # they are just valid for a single product for k, f in fdict.iteritems(): - silent_remove(c.inputdir + "/fort." + k) - fdict[k] = open(c.inputdir + '/fort.' + k, 'w') + fortfile = os.path.join(c.inputdir, 'fort.' + k) + silent_remove(fortfile) + fdict[k] = open(fortfile, 'w') cdate = str(grib_get(gid, 'date')) time = grib_get(gid, 'time') @@ -930,23 +937,20 @@ class EcFlexpart(object): if timestamp < slimit or timestamp > elimit: continue + else: + pass try: - if c.wrf == '1': - if 'olddate' not in locals(): - fwrf = open(c.outputdir + '/WRF' + cdate + - '.{:0>2}'.format(time) + '.000.grb2', 'w') + if c.wrf: + if 'olddate' not in locals() or cdate != olddate: + fwrf = open(os.path.join(c.outputdir, + 'WRF' + cdate + '.{:0>2}'.format(time) + + '.000.grb2'), 'w') olddate = cdate[:] - else: - if cdate != olddate: - fwrf = open(c.outputdir + '/WRF' + cdate + - '.{:0>2}'.format(time) + '.000.grb2', - 'w') - olddate = cdate[:] except AttributeError: pass - # helper variable to remember which fields are already used. + # helper variable to remember which fields were already used. savedfields = [] while 1: if gid is None: @@ -957,7 +961,7 @@ class EcFlexpart(object): if paramId == 133 and gridtype == 'reduced_gg': # Specific humidity (Q.grb) is used as a template only # so we need the first we "meet" - with open(c.inputdir + '/fort.18', 'w') as fout: + with open(os.path.join(c.inputdir, 'fort.18'), 'w') as fout: grib_write(gid, fout) elif paramId == 131 or paramId == 132: grib_write(gid, fdict['10']) @@ -970,7 +974,7 @@ class EcFlexpart(object): elif paramId == 155 and gridtype == 'sh': grib_write(gid, fdict['13']) elif paramId in [129, 138, 155] and levtype == 'hybrid' \ - and c.wrf == '1': + and c.wrf: pass elif paramId == 246 or paramId == 247: # cloud liquid water and ice @@ -990,16 +994,17 @@ class EcFlexpart(object): grib_write(gid, fdict['16']) savedfields.append(paramId) else: - print 'duplicate ' + str(paramId) + ' not written' + print('duplicate ' + str(paramId) + ' not written') try: - if c.wrf == '1': - if levtype == 'hybrid': # model layer - if paramId in [129, 130, 131, 132, 133, 138, 155]: - grib_write(gid, fwrf) - else: # sfc layer - if paramId in wrfpars: - grib_write(gid, fwrf) + if c.wrf: + # model layer + if levtype == 'hybrid' and \ + paramId in [129, 130, 131, 132, 133, 138, 155]: + grib_write(gid, fwrf) + # sfc layer + elif paramId in wrfpars: + grib_write(gid, fwrf) except AttributeError: pass @@ -1013,29 +1018,28 @@ class EcFlexpart(object): if convertFlag: pwd = os.getcwd() os.chdir(c.inputdir) - if os.stat('fort.21').st_size == 0 and int(c.eta) == 1: - print 'Parameter 77 (etadot) is missing, most likely it is \ - not available for this type or date/time\n' - print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n' + if os.stat('fort.21').st_size == 0 and c.eta: + print('Parameter 77 (etadot) is missing, most likely it is \ + not available for this type or date/time\n') + print('Check parameters CLASS, TYPE, STREAM, START_DATE\n') my_error(c.mailfail, 'fort.21 is empty while parameter eta \ is set to 1 in CONTROL file') # create the corresponding output file fort.15 # (generated by CONVERT2) + fort.16 (paramId 167 and 168) - p = 
subprocess.check_call( - [os.path.expandvars(os.path.expanduser(c.exedir)) + - '/CONVERT2'], shell=True) + p = subprocess.check_call([os.path.join(c.exedir, 'CONVERT2')], + shell=True) os.chdir(pwd) # create final output filename, e.g. EN13040500 (ENYYMMDDHH) - fnout = c.inputdir + '/' + c.prefix + fnout = os.path.join(c.inputdir, c.prefix) if c.maxstep > 12: suffix = cdate[2:8] + '.{:0>2}'.format(time/100) + \ '.{:0>3}'.format(step) else: suffix = cdateH[2:10] fnout += suffix - print "outputfile = " + fnout + print("outputfile = " + fnout) self.outputfilelist.append(fnout) # needed for final processing # create outputfile and copy all data from intermediate files @@ -1043,22 +1047,25 @@ class EcFlexpart(object): orolsm = os.path.basename(glob.glob( c.inputdir + '/OG_OROLSM__SL.*.' + c.ppid + '*')[0]) fluxfile = 'flux' + cdate[0:2] + suffix - if c.cwc != '1': + if not c.cwc: flist = ['fort.15', fluxfile, 'fort.16', orolsm] else: flist = ['fort.15', 'fort.22', fluxfile, 'fort.16', orolsm] with open(fnout, 'wb') as fout: for f in flist: - shutil.copyfileobj( - open(c.inputdir + '/' + f, 'rb'), fout) + shutil.copyfileobj(open(os.path.join(c.inputdir, f), + 'rb'), fout) - if c.omega == '1': - with open(c.outputdir + '/OMEGA', 'wb') as fout: + if c.omega: + with open(os.path.join(c.outputdir, 'OMEGA'), 'wb') as fout: shutil.copyfileobj( - open(c.inputdir + '/fort.25', 'rb'), fout) + open(os.path.join(c.inputdir, 'fort.25'), + 'rb'), fout) + else: + pass - if hasattr(c, 'wrf') and c.wrf == '1': + if c.wrf: fwrf.close() grib_index_release(iid) @@ -1101,7 +1108,7 @@ class EcFlexpart(object): table128 = init128(_config.PATH_GRIBTABLE) pars = to_param_id(self.params['OG_acc_SL'][0], table128) index_keys = ["date", "time", "step"] - indexfile = c.inputdir + "/date_time_stepRange.idx" + indexfile = os.path.join(c.inputdir, _config.FILE_GRIB_INDEX) silent_remove(indexfile) grib = GribTools(inputfiles.files) # creates new index file @@ -1111,7 +1118,7 @@ class EcFlexpart(object): index_vals = [] for key in index_keys: key_vals = grib_index_get(iid, key) - print key_vals + print(key_vals) # have to sort the steps for disaggregation, # therefore convert to int first if key == 'step': @@ -1132,17 +1139,22 @@ class EcFlexpart(object): svalsdict[str(p)] = [] stepsdict[str(p)] = [] - print 'maxstep: ', c.maxstep + print('maxstep: ', c.maxstep) for prod in product(*index_vals): # e.g. prod = ('20170505', '0', '12') # ( date ,time, step) # per date e.g. time = 0, 1200 # per time e.g. 
step = 3, 6, 9, 12 + print('current prod: ', prod) for i in range(len(index_keys)): grib_index_select(iid, index_keys[i], prod[i]) + # get first id from current product gid = grib_new_from_index(iid) + + # if there is data for this product combination + # prepare some date and time parameter before reading the data if gid is not None: cdate = grib_get(gid, 'date') time = grib_get(gid, 'time') @@ -1160,28 +1172,31 @@ class EcFlexpart(object): break if c.maxstep > 12: - fnout = c.inputdir + '/flux' + \ - sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ - '.{:0>3}'.format(step-2*int(c.dtime)) - gnout = c.inputdir + '/flux' + \ - sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ - '.{:0>3}'.format(step-int(c.dtime)) - hnout = c.inputdir + '/flux' + \ - sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ - '.{:0>3}'.format(step) - g = open(gnout, 'w') - h = open(hnout, 'w') + fnout = os.path.join(c.inputdir, 'flux' + + sdate.strftime('%Y%m%d') + + '.{:0>2}'.format(time/100) + + '.{:0>3}'.format(step-2*int(c.dtime))) + gnout = os.path.join(c.inputdir, 'flux' + + sdate.strftime('%Y%m%d') + + '.{:0>2}'.format(time/100) + + '.{:0>3}'.format(step-int(c.dtime))) + hnout = os.path.join(c.inputdir, 'flux' + + sdate.strftime('%Y%m%d') + + '.{:0>2}'.format(time/100) + + '.{:0>3}'.format(step)) else: - fnout = c.inputdir + '/flux' + fdate.strftime('%Y%m%d%H') - gnout = c.inputdir + '/flux' + (fdate + - timedelta(hours=int(c.dtime)) - ).strftime('%Y%m%d%H') - hnout = c.inputdir + '/flux' + sdates.strftime('%Y%m%d%H') - g = open(gnout, 'w') - h = open(hnout, 'w') - - print "outputfile = " + fnout - f = open(fnout, 'w') + fnout = os.path.join(c.inputdir, 'flux' + + fdate.strftime('%Y%m%d%H')) + gnout = os.path.join(c.inputdir, 'flux' + + (fdate + timedelta(hours=int(c.dtime))). + strftime('%Y%m%d%H')) + hnout = os.path.join(c.inputdir, 'flux' + + sdates.strftime('%Y%m%d%H')) + + print("outputfile = " + fnout) + f_handle = open(fnout, 'w') + g_handle = open(gnout, 'w') + h_handle = open(hnout, 'w') # read message for message and store relevant data fields # data keywords are stored in pars @@ -1242,7 +1257,7 @@ class EcFlexpart(object): grib_set(gid, 'time', fdate.hour*100) grib_set(gid, 'date', fdate.year*10000 + fdate.month*100+fdate.day) - grib_write(gid, f) + grib_write(gid, f_handle) if c.basetime is not None: elimit = datetime.strptime(c.end_date + @@ -1269,7 +1284,7 @@ class EcFlexpart(object): grib_set(gid, 'date', truedatetime.year * 10000 + truedatetime.month * 100 + truedatetime.day) - grib_write(gid, h) + grib_write(gid, h_handle) #values = (svdp[1]+svdp[2])/2. if cparamId == '142' or cparamId == '143': @@ -1284,15 +1299,15 @@ class EcFlexpart(object): truedatetime.month * 100 + truedatetime.day) grib_set_values(gid, values) - grib_write(gid, g) + grib_write(gid, g_handle) grib_release(gid) gid = grib_new_from_index(iid) - f.close() - g.close() - h.close() + f_handle.close() + g_handle.close() + h_handle.close() grib_index_release(iid) diff --git a/python/GribTools.py b/python/GribTools.py index a68d1a5485f1b7e3039081ec7fb176d90466ab06..7d375b18bce7fbaeace888d41fadcb27c46f012f 100644 --- a/python/GribTools.py +++ b/python/GribTools.py @@ -296,15 +296,15 @@ class GribTools(object): iid: integer Grib index id. ''' - print "... index will be done" + print("... 
index will be done") iid = None if os.path.exists(index_file): iid = grib_index_read(index_file) - print "Use existing index file: %s " % (index_file) + print("Use existing index file: %s " % (index_file)) else: for filename in self.filenames: - print "Inputfile: %s " % (filename) + print("Inputfile: %s " % (filename)) if iid is None: iid = grib_index_new_from_file(filename, index_keys) else: @@ -313,6 +313,6 @@ class GribTools(object): if iid is not None: grib_index_write(iid, index_file) - print '... index done' + print('... index done') return iid diff --git a/python/MarsRetrieval.py b/python/MarsRetrieval.py index 54a32b06513e9428f27cf0771b148e2c98e86893..755dcefe7497ca985375d2cf9ab4210b1414428b 100644 --- a/python/MarsRetrieval.py +++ b/python/MarsRetrieval.py @@ -64,6 +64,7 @@ import subprocess import os +import _config # ------------------------------------------------------------------------------ # CLASS # ------------------------------------------------------------------------------ @@ -309,7 +310,8 @@ class MarsRetrieval(object): def display_info(self): ''' @Description: - Prints all class attributes and their values. + Prints all class attributes and their values to the + standard output. @Input: self: instance of MarsRetrieval @@ -327,7 +329,39 @@ class MarsRetrieval(object): if item[0] in 'server': pass else: - print item[0] + ': ' + str(item[1]) + print(item[0] + ': ' + str(item[1])) + + return + + + def print_info(self): + ''' + @Description: + Prints all mars requests to an extra file for debugging and + information. + + @Input: + self: instance of MarsRetrieval + For description see class documentation. + + @Return: + <nothing> + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # open a file to store all requests to + with open(os.path.join(_config.PATH_RUN_DIR, + _config.FILE_MARS_REQUESTS), 'a') as f: + f.write('mars\n') + # iterate through all attributes and print them + # with their corresponding values + for item in attrs.items(): + if item[0] in 'server': + pass + else: + f.write(item[0] + ': ' + str(item[1]) + '\n') + f.write('\n\n') return @@ -370,11 +404,11 @@ class MarsRetrieval(object): try: self.server.execute(s, target) except: - print 'MARS Request failed, \ - have you already registered at apps.ecmwf.int?' 
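The new print_info above appends each MARS request to run/mars_requests.dat instead of sending it, while display_info still prints to standard output; which combination runs is steered by the request value read from the CONTROL file. A compact sketch of that dispatch, assuming mr is a fully prepared MarsRetrieval instance; the helper name is illustrative and the unknown-value branch is stricter than the plain 'Failure' print used in EcFlexpart.retrieve:

def run_retrieval(mr, request):
    if request == 0:
        # normal operation: show the request and retrieve the data
        mr.display_info()
        mr.data_retrieve()
    elif request == 1:
        # dry run: only write the request to run/mars_requests.dat
        mr.print_info()
    elif request == 2:
        # write the request file and retrieve the data as well
        mr.print_info()
        mr.display_info()
        mr.data_retrieve()
    else:
        raise ValueError('request must be 0, 1 or 2')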
+ print('MARS Request failed, \ + have you already registered at apps.ecmwf.int?') raise IOError if os.stat(target).st_size == 0: - print 'MARS Request returned no data - please check request' + print('MARS Request returned no data - please check request') raise IOError # MARS request via extra process in shell else: @@ -383,14 +417,14 @@ class MarsRetrieval(object): stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1) pout = p.communicate(input=s)[0] - print pout.decode() + print(pout.decode()) if 'Some errors reported' in pout.decode(): - print 'MARS Request failed - please check request' + print('MARS Request failed - please check request') raise IOError if os.stat(target).st_size == 0: - print 'MARS Request returned no data - please check request' + print('MARS Request returned no data - please check request') raise IOError return diff --git a/python/_config.py b/python/_config.py index 4389b23f3bc69c6a70a92ffc6566bf23588a6974..7ae76a6bd7803b9ca749b68d221dc5d3e5c50245 100644 --- a/python/_config.py +++ b/python/_config.py @@ -27,24 +27,66 @@ import inspect _VERSION_STR = '7.1' -# add path to pythonpath +# ------------------------------------------------------------------------------ +# FILENAMES +# ------------------------------------------------------------------------------ + +FLEXEXTRACT_DIRNAME = 'flex_extract_v' + _VERSION_STR +FILE_MARS_REQUESTS = 'mars_requests.dat' +FORTRAN_EXECUTABLE = 'CONVERT2' +FILE_USER_ENVVARS = 'ECMWF_ENV' +TEMPFILE_INSTALL_COMPILEJOB = 'compilejob.temp' +FILE_INSTALL_COMPILEJOB = 'compilejob.ksh' +TEMPFILE_INSTALL_JOB = 'job.temp.o' +TEMPFILE_JOB = 'job.temp' +FILE_JOB_OD = 'job.ksh' +FILE_JOB_OP = 'jopoper.ksh' +FILE_NAMELIST = 'fort.4' +FILE_GRIB_INDEX = 'date_time_stepRange.idx' + +# ------------------------------------------------------------------------------ +# PATHES +# ------------------------------------------------------------------------------ + +# path to the flex_extract directory +PATH_FLEXEXTRACT_DIR = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + '/../') + +# path to the local python source files PATH_LOCAL_PYTHON = os.path.dirname(os.path.abspath( inspect.getfile(inspect.currentframe()))) +PATH_RELATIVE_PYTHON = os.path.relpath(PATH_LOCAL_PYTHON, PATH_FLEXEXTRACT_DIR) +# add path to pythonpath if PATH_LOCAL_PYTHON not in sys.path: sys.path.append(PATH_LOCAL_PYTHON) -PATH_FLEXEXTRACT_DIR = os.path.normpath(os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) + '/../') +# path to the templates +PATH_TEMPLATES = os.path.join(PATH_FLEXEXTRACT_DIR, '_templates') +PATH_RELATIVE_TEMPLATES = os.path.relpath(PATH_TEMPLATES, PATH_FLEXEXTRACT_DIR) -PATH_TEMPLATES = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + - '_templates') +# path to the environment parameter file +PATH_ECMWF_ENV = os.path.join(PATH_LOCAL_PYTHON, FILE_USER_ENVVARS) +PATH_RELATIVE_ECMWF_ENV = os.path.relpath(PATH_ECMWF_ENV, PATH_FLEXEXTRACT_DIR) # path to gribtable -PATH_GRIBTABLE = os.path.join(PATH_TEMPLATES + os.path.sep + - 'ecmwf_grib1_table_128') +PATH_GRIBTABLE = os.path.join(PATH_TEMPLATES, 'ecmwf_grib1_table_128') + +# path to run directory +PATH_RUN_DIR = os.path.join(PATH_FLEXEXTRACT_DIR, 'run') +PATH_RELATIVE_RUN_DIR = os.path.relpath(PATH_RUN_DIR, PATH_FLEXEXTRACT_DIR) + +# path to directory where all control files are stored +PATH_CONTROLFILES = os.path.join(PATH_RUN_DIR, 'control') +PATH_RELATIVE_CONTROLFILES = os.path.relpath(PATH_CONTROLFILES, PATH_FLEXEXTRACT_DIR) + +# path to 
directory where all job scripts are stored +PATH_JOBSCRIPTS = os.path.join(PATH_RUN_DIR, 'jobscripts') +PATH_RELATIVE_JOBSCRIPTS = os.path.relpath(PATH_JOBSCRIPTS, PATH_FLEXEXTRACT_DIR) + +# path to the fortran executable and the source code +PATH_FORTRAN_SRC = os.path.join(PATH_FLEXEXTRACT_DIR, 'src') +PATH_RELATIVE_FORTRAN_SRC = os.path.relpath(PATH_FORTRAN_SRC, PATH_FLEXEXTRACT_DIR) -PATH_RUN_DIR = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + - 'run') +# path to the python testing directory +PATH_TEST_DIR = os.path.join(PATH_LOCAL_PYTHON, 'pythontest') -PATH_CONTROLFILES = os.path.join(PATH_RUN_DIR + os.path.sep + - 'control') diff --git a/python/get_mars_data.py b/python/get_mars_data.py index f059ed4cb6b452b02fc30b7ff14650326a91d552..07dfd78fb64f3badf344674422b5396ccaaa139f 100755 --- a/python/get_mars_data.py +++ b/python/get_mars_data.py @@ -56,16 +56,10 @@ except ImportError: ecapi = False # software specific classes and modules from flex_extract +import _config from tools import my_error, normal_exit, get_cmdline_arguments, read_ecenv from EcFlexpart import EcFlexpart from UioFiles import UioFiles - -# add path to pythonpath so that python finds its buddies -LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) -if LOCAL_PYTHON_PATH not in sys.path: - sys.path.append(LOCAL_PYTHON_PATH) - # ------------------------------------------------------------------------------ # FUNCTION # ------------------------------------------------------------------------------ @@ -88,19 +82,16 @@ def main(): try: c = ControlFile(args.controlfile) except IOError: - try: - c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) - except IOError: - print 'Could not read CONTROL file "' + args.controlfile + '"' - print 'Either it does not exist or its syntax is wrong.' - print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' - sys.exit(1) + print('Could not read CONTROL file "' + args.controlfile + '"') + print('Either it does not exist or its syntax is wrong.') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + sys.exit(1) - env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + env_parameter = read_ecenv(_config.PATH_ECMWF_ENV) c.assign_args_to_control(args, env_parameter) c.assign_envs_to_control(env_parameter) - c.check_conditions() + c.check_conditions(args.queue) get_mars_data(c) normal_exit(c.mailfail, 'Done!') @@ -136,9 +127,13 @@ def get_mars_data(c): if not os.path.exists(c.inputdir): os.makedirs(c.inputdir) - print "Retrieving EC data!" - print "start date %s " % (c.start_date) - print "end date %s " % (c.end_date) + if c.request == 0 or c.request == 2: + print("Retrieving EC data!") + elif c.request == 1: + print("Printing mars requests!") + + print("start date %s " % (c.start_date)) + print("end date %s " % (c.end_date)) if ecapi: server = ecmwfapi.ECMWFService("mars") @@ -146,7 +141,7 @@ def get_mars_data(c): server = False c.ecapi = ecapi - print 'ecapi: ', c.ecapi + print('Using ECMWF WebAPI: ' + str(c.ecapi)) # basetime geht rückwärts @@ -191,10 +186,11 @@ def get_mars_data(c): endp1 = end + datetime.timedelta(days=1) # -------------- flux data ------------------------------------------------ - print 'removing old flux content of ' + c.inputdir - tobecleaned = UioFiles(c.inputdir, - '*_acc_*.' + str(os.getppid()) + '.*.grb') - tobecleaned.delete_files() + if c.request == 0 or c.request == 2: + print('... 
removing old flux content of ' + c.inputdir) + tobecleaned = UioFiles(c.inputdir, + '*_acc_*.' + str(os.getppid()) + '.*.grb') + tobecleaned.delete_files() # if forecast for maximum one day (upto 24h) are to be retrieved, # collect accumulation data (flux data) @@ -212,10 +208,11 @@ def get_mars_data(c): do_retrievement(c, server, start, end, datechunk, fluxes=True) # -------------- non flux data -------------------------------------------- - print 'removing old non flux content of ' + c.inputdir - tobecleaned = UioFiles(c.inputdir, - '*__*.' + str(os.getppid()) + '.*.grb') - tobecleaned.delete_files() + if c.request == 0 or c.request == 2: + print('... removing old non flux content of ' + c.inputdir) + tobecleaned = UioFiles(c.inputdir, + '*__*.' + str(os.getppid()) + '.*.grb') + tobecleaned.delete_files() do_retrievement(c, server, start, end, datechunk, fluxes=False) @@ -279,10 +276,10 @@ def do_retrievement(c, server, start, end, delta_t, fluxes=False): dates = day.strftime("%Y%m%d") + "/to/" + \ end.strftime("%Y%m%d") - print "retrieve " + dates + " in dir " + c.inputdir + print("... retrieve " + dates + " in dir " + c.inputdir) try: - flexpart.retrieve(server, dates, c.inputdir) + flexpart.retrieve(server, dates, c.request, c.inputdir) except IOError: my_error(c.mailfail, 'MARS request failed') diff --git a/python/install.py b/python/install.py index 80d87be578fbbeeb2e04fbecfb340fcc382d20e1..89610bf4eaddf98c7d7e9241fd5139bc623c8f67 100755 --- a/python/install.py +++ b/python/install.py @@ -11,6 +11,9 @@ # - applied PEP8 style guide # - added documentation # - moved install_args_and_control in here +# - splitted code in smaller functions +# - delete convert build files in here instead of compile job script +# - changed static path names to Variables from config file # # @License: # (C) Copyright 2015-2018. @@ -33,6 +36,7 @@ # - get_install_cmdline_arguments # - install_via_gateway # - mk_tarball +# - un_tarball # - mk_env_vars # - mk_compilejob # - mk_job_template @@ -46,23 +50,17 @@ # ------------------------------------------------------------------------------ import os import sys +import glob import subprocess import inspect from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter # software specific classes and modules from flex_extract +import _config from ControlFile import ControlFile from UioFiles import UioFiles from tools import make_dir, put_file_to_ecserver, submit_job_to_ecserver -# add path to pythonpath so that python finds its buddies -LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) -if LOCAL_PYTHON_PATH not in sys.path: - sys.path.append(LOCAL_PYTHON_PATH) - -_VERSION_STR = '7.1' - # ------------------------------------------------------------------------------ # FUNCTIONS # ------------------------------------------------------------------------------ @@ -79,16 +77,17 @@ def main(): <nothing> ''' - os.chdir(LOCAL_PYTHON_PATH) + #os.chdir(_config.PATH_LOCAL_PYTHON) + args = get_install_cmdline_arguments() try: c = ControlFile(args.controlfile) except IOError: - print 'Could not read CONTROL file "' + args.controlfile + '"' - print 'Either it does not exist or its syntax is wrong.' 
- print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' + print('Could not read CONTROL file "' + args.controlfile + '"') + print('Either it does not exist or its syntax is wrong.') + print('Try "' + sys.argv[0].split('/')[-1] + + ' -h" to print usage information') exit(1) c.assign_args_to_control(args) @@ -133,7 +132,7 @@ def get_install_cmdline_arguments(): default=None, help="FLEXPART root directory on ECMWF \ servers (to find grib2flexpart and COMMAND file)\n\ Normally flex_extract resides in the scripts directory \ - of the FLEXPART distribution, thus the:") + of the FLEXPART distribution.") # arguments for job submission to ECMWF, only needed by submit.py parser.add_argument("--job_template", dest='job_template', @@ -170,120 +169,171 @@ def install_via_gateway(c): @Return: <nothing> ''' + import tarfile + + ecd = _config.PATH_FLEXEXTRACT_DIR + tarball_name = _config.FLEXEXTRACT_DIRNAME + '.tar' + tar_file = os.path.join(ecd, tarball_name) - ecd = c.ecmwfdatadir - tarball_name = 'flex_extract_v' + _VERSION_STR + '.tar' - target_dir = 'flex_extract_v' + _VERSION_STR - fortran_executable = 'CONVERT2' + target_dirname = _config.FLEXEXTRACT_DIRNAME + fortran_executable = _config.FORTRAN_EXECUTABLE if c.install_target.lower() != 'local': # ecgate or cca - mk_compilejob(ecd + 'python/compilejob.temp', c.makefile, - c.install_target, c.ecuid, c.ecgid, + mk_compilejob(c.makefile, c.install_target, c.ecuid, c.ecgid, c.flexpart_root_scripts) - mk_job_template(ecd + 'python/job.temp.o', c.ecuid, c.ecgid, c.gateway, + mk_job_template(c.ecuid, c.ecgid, c.gateway, c.destination, c.flexpart_root_scripts) - mk_env_vars(ecd, c.ecuid, c.ecgid, c.gateway, c.destination) + mk_env_vars(c.ecuid, c.ecgid, c.gateway, c.destination) - #os.chdir('/') - - mk_tarball(ecd, tarball_name) + mk_tarball(tar_file) put_file_to_ecserver(ecd, tarball_name, c.install_target, c.ecuid, c.ecgid) - submit_job_to_ecserver(ecd + '/python/', c.install_target, - 'compilejob.ksh') + submit_job_to_ecserver(c.install_target, + os.path.join(_config.PATH_RELATIVE_JOBSCRIPTS, + _config.FILE_INSTALL_COMPILEJOB)) - print 'job compilation script has been submitted to ecgate for ' + \ - 'installation in ' + c.flexpart_root_scripts + \ - '/' + target_dir - print 'You should get an email with subject "flexcompile" within ' + \ - 'the next few minutes!' 
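mk_env_vars, called above and defined further below, persists the four account settings in a small key/value file named ECMWF_ENV (one "KEY value" pair per line), which read_ecenv parses again in get_mars_data.py and prepare_flexpart.py. An illustrative call and the file it would write; the user, group, gateway and destination values are taken from the example job script elsewhere in this patch and are placeholders only:

mk_env_vars(ecuid='km4a', ecgid='at',
            gateway='srvx8.img.univie.ac.at',
            destination='annep@genericSftp')

# resulting ECMWF_ENV file:
#   ECUID km4a
#   ECGID at
#   GATEWAY srvx8.img.univie.ac.at
#   DESTINATION annep@genericSftp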
+ print('job compilation script has been submitted to ecgate for ' + + 'installation in ' + c.flexpart_root_scripts + + '/' + target_dirname) + print('You should get an email with subject "flexcompile" within ' + + 'the next few minutes!') else: #local - if not c.flexpart_root_scripts or c.flexpart_root_scripts == '../': - print 'WARNING: FLEXPART_ROOT_SCRIPTS has not been specified' - print 'There will be only the compilation of ' + \ - ' in ' + ecd + '/src' - os.chdir(ecd + '/src') + if c.flexpart_root_scripts == _config.PATH_FLEXEXTRACT_DIR : + print('WARNING: FLEXPART_ROOT_SCRIPTS has not been specified') + print('flex_extract will be installed in here by compiling the ' + + 'Fortran source in ' + _config.PATH_FORTRAN_SRC) + os.chdir(_config.PATH_FORTRAN_SRC) else: # creates the target working directory for flex_extract c.flexpart_root_scripts = os.path.expandvars(os.path.expanduser( - c.flexpart_root_scripts)) + c.flexpart_root_scripts)) if os.path.abspath(ecd) != os.path.abspath(c.flexpart_root_scripts): - os.chdir('/') - mk_tarball(ecd, tarball_name) - make_dir(c.flexpart_root_scripts + '/' + target_dir) - os.chdir(c.flexpart_root_scripts + '/' + target_dir) - print 'Untar ...' - subprocess.check_call(['tar', '-xvf', - ecd + '../' + tarball_name]) - os.chdir(c.flexpart_root_scripts + '/' + target_dir + '/src') + mk_tarball(tar_file) + make_dir(os.path.join(c.flexpart_root_scripts, + target_dirname)) + os.chdir(os.path.join(c.flexpart_root_scripts, + target_dirname)) + un_tarball(tar_file) + os.chdir(os.path.join(c.flexpart_root_scripts, + target_dirname, + _config.PATH_RELATIVE_FORTRAN_SRC)) # Create Fortran executable - CONVERT2 - print 'Install ' + target_dir + ' software on ' + \ - c.install_target + ' in directory ' + \ - os.path.abspath(os.getcwd() + '/../') + '\n' + print('Install ' + target_dirname + ' software at ' + + c.install_target + ' in directory ' + + os.path.abspath(c.flexpart_root_scripts) + '\n') + + delete_convert_build('.') + make_convert_build('.', c.makefile) - delete_convert_build('') - make_convert_build('', c.makefile, fortran_executable) + os.chdir(ecd) + if os.path.isfile(tar_file): + os.remove(tar_file) return -def mk_tarball(ecd, tarname): +def mk_tarball(tarball_path): ''' @Description: - Creates a tarball from all files which need to be sent to the + Creates a tarball with all necessary files which need to be sent to the installation directory. It does not matter if this is local or remote. Collects all python files, the Fortran source and makefiles, - the ECMWF_ENV file, the CONTROL files as well as - the korn shell and template files. + the ECMWF_ENV file, the CONTROL files as well as the + template files. @Input: - ecd: string - The path were the file is to be stored. - - tarname: string - The name of the file to send to the ECMWF server. + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. @Return: <nothing> ''' - - print 'Create tarball ...' 
+ import tarfile + from glob import glob + + print('Create tarball ...') + + # change to FLEXEXTRACT directory so that the tar can contain + # relative pathes to the files and directories + ecd = _config.PATH_FLEXEXTRACT_DIR + '/' + os.chdir(ecd) + + # get lists of the files to be added to the tar file + ECMWF_ENV_FILE = [_config.PATH_RELATIVE_ECMWF_ENV] + pyfiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_LOCAL_PYTHON + + os.path.sep + '*py')] + controlfiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_CONTROLFILES + + os.path.sep + 'CONTROL*')] + tempfiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_TEMPLATES + + os.path.sep + '*')] + ffiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.f*')] + hfiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.h')] + makefiles = [os.path.relpath(x, ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + 'Makefile*')] + + # concatenate single lists to one for a better looping + filelist = pyfiles + controlfiles + tempfiles + ffiles + hfiles + \ + makefiles + ECMWF_ENV_FILE + + # create installation tar-file try: - subprocess.check_call(['tar -cvf '+ - ecd + '../' + tarname + ' ' + - ecd + 'python/*py ' + - ecd + 'python/CONTROL* ' + - ecd + 'python/*ksh ' + - ecd + 'python/*temp* ' + - ecd + 'python/ECMWF_ENV ' + - ecd + '_templates ' + - ecd + 'src/*.f ' + - ecd + 'src/*.f90 ' + - ecd + 'src/*.h ' + - ecd + 'src/Makefile*'], shell=True) + with tarfile.open(tarball_path, "w:gz") as tar_handle: + for file in filelist: + tar_handle.add(file) + except subprocess.CalledProcessError as e: - print 'ERROR:' - print e.output - sys.exit('could not make installation tar ball!') + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + sys.exit('... could not make installation tar ball!') return -def mk_env_vars(ecd, ecuid, ecgid, gateway, destination): + +def un_tarball(tarball_path): + ''' + @Description: + Extracts the given tarball into current directory. + + @Input: + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. + + @Return: + <nothing> + ''' + import tarfile + + print('Untar ...') + + with tarfile.open(tarball_path) as tar_handle: + tar_handle.extractall() + + return + +def mk_env_vars(ecuid, ecgid, gateway, destination): ''' @Description: Creates a file named ECMWF_ENV which contains the necessary environmental variables at ECMWF servers. @Input: - ecd: string - The path were the file is to be stored. - ecuid: string The user id on ECMWF server. @@ -301,7 +351,7 @@ def mk_env_vars(ecd, ecuid, ecgid, gateway, destination): <nothing> ''' - with open(ecd + 'python/ECMWF_ENV', 'w') as fo: + with open(_config.PATH_RELATIVE_ECMWF_ENV, 'w') as fo: fo.write('ECUID ' + ecuid + '\n') fo.write('ECGID ' + ecgid + '\n') fo.write('GATEWAY ' + gateway + '\n') @@ -309,7 +359,7 @@ def mk_env_vars(ecd, ecuid, ecgid, gateway, destination): return -def mk_compilejob(template, makefile, target, ecuid, ecgid, fp_root): +def mk_compilejob(makefile, target, ecuid, ecgid, fp_root): ''' @Description: Modifies the original job template file so that it is specified @@ -317,10 +367,6 @@ def mk_compilejob(template, makefile, target, ecuid, ecgid, fp_root): is stored in a new file "job.temp" in the python directory. @Input: - template: string - File which contains the original text for the job template. 
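mk_tarball above opens the archive with mode "w:gz", so the file is gzip-compressed even though it keeps the plain ".tar" name, while un_tarball uses the default read mode, which lets tarfile detect the compression transparently. A minimal round-trip sketch of that pattern with illustrative file names; note that tarfile itself raises tarfile.TarError/OSError rather than subprocess.CalledProcessError:

import tarfile

# pack: relative paths are stored as given, which is why mk_tarball
# changes into the flex_extract root directory first
with tarfile.open('flex_extract_test.tar', 'w:gz') as tar_handle:
    tar_handle.add('python/_config.py')

# unpack: the default mode 'r' autodetects the gzip compression
with tarfile.open('flex_extract_test.tar') as tar_handle:
    print(tar_handle.getnames())
    tar_handle.extractall()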
- It must contain the complete path to the file. - makefile: string Name of the makefile which should be used to compile FORTRAN CONVERT2 program. @@ -342,10 +388,14 @@ def mk_compilejob(template, makefile, target, ecuid, ecgid, fp_root): <nothing> ''' + template = os.path.join(_config.PATH_RELATIVE_TEMPLATES, + _config.TEMPFILE_INSTALL_COMPILEJOB) with open(template) as f: fdata = f.read().split('\n') - with open(template[:-4] + 'ksh', 'w') as fo: + compilejob = os.path.join(_config.PATH_RELATIVE_JOBSCRIPTS, + _config.FILE_INSTALL_COMPILEJOB) + with open(compilejob, 'w') as fo: for data in fdata: if 'MAKEFILE=' in data: data = 'export MAKEFILE=' + makefile @@ -370,18 +420,14 @@ def mk_compilejob(template, makefile, target, ecuid, ecgid, fp_root): return -def mk_job_template(template, ecuid, ecgid, gateway, destination, fp_root): +def mk_job_template(ecuid, ecgid, gateway, destination, fp_root): ''' @Description: Modifies the original job template file so that it is specified for the user and the environment were it will be applied. Result - is stored in a new file "job.temp" in the python directory. + is stored in a new file. @Input: - template: string - File which contains the original text for the job template. - It must contain the complete path to the file. - ecuid: string The user id on ECMWF server. @@ -402,41 +448,46 @@ def mk_job_template(template, ecuid, ecgid, gateway, destination, fp_root): @Return: <nothing> ''' + fp_root_path_to_python = os.path.join(fp_root, _config.FLEXEXTRACT_DIRNAME, + _config.PATH_RELATIVE_PYTHON) + template = os.path.join(_config.PATH_RELATIVE_TEMPLATES, + _config.TEMPFILE_INSTALL_JOB) with open(template) as f: fdata = f.read().split('\n') - with open(template[:-2], 'w') as fo: + jobfile_temp = os.path.join(_config.PATH_RELATIVE_TEMPLATES, + _config.TEMPFILE_JOB) + with open(jobfile_temp, 'w') as fo: for data in fdata: if '--workdir' in data: - data = '#SBATCH --workdir=/scratch/ms/' + ecgid + \ - '/' + ecuid + data = '#SBATCH --workdir=/scratch/ms/' + ecgid + '/' + ecuid elif '##PBS -o' in data: data = '##PBS -o /scratch/ms/' + ecgid + '/' + \ ecuid + 'flex_ecmwf.$Jobname.$Job_ID.out' elif 'export PATH=${PATH}:' in data: - data += fp_root + '/flex_extract_v7.1/python' + data += fp_root_path_to_python fo.write(data + '\n') return -def delete_convert_build(ecd): +def delete_convert_build(src_path): ''' @Description: Clean up the Fortran source directory and remove all build files (e.g. *.o, *.mod and CONVERT2) @Input: - ecd: string - The path to the Fortran program. + src_path: string + Path to the fortran source directory. @Return: <nothing> ''' - modfiles = UioFiles(ecd, '*.mod') - objfiles = UioFiles(ecd, '*.o') - exefile = UioFiles(ecd, 'CONVERT2') + modfiles = UioFiles(src_path, '*.mod') + objfiles = UioFiles(src_path, '*.o') + exefile = UioFiles(src_path, _config.FORTRAN_EXECUTABLE) modfiles.delete_files() objfiles.delete_files() @@ -444,49 +495,48 @@ def delete_convert_build(ecd): return -def make_convert_build(ecd, makefile, f_executable): +def make_convert_build(src_path, makefile): ''' @Description: Compiles the Fortran code and generates the executable. @Input: - ecd: string - The path were the file is to be stored. + src_path: string + Path to the fortran source directory. makefile: string The name of the makefile which should be used. - f_executable: string - The name of the executable the Fortran program generates after - compilation. 
- @Return: <nothing> ''' try: - print 'Using makefile: ' + makefile - p = subprocess.Popen(['make', '-f', ecd + makefile], + print('Using makefile: ' + makefile) + p = subprocess.Popen(['make', '-f', + os.path.join(src_path, makefile)], stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1) pout, perr = p.communicate() - print pout + print(pout) if p.returncode != 0: - print perr - print 'Please edit ' + makefile + \ - ' or try another Makefile in the src directory.' - print 'Most likely GRIB_API_INCLUDE_DIR, GRIB_API_LIB ' \ - 'and EMOSLIB must be adapted.' - print 'Available Makefiles:' - print UioFiles('.', 'Makefile*') + print(perr) + print('Please edit ' + makefile + + ' or try another Makefile in the src directory.') + print('Most likely GRIB_API_INCLUDE_DIR, GRIB_API_LIB ' + 'and EMOSLIB must be adapted.') + print('Available Makefiles:') + print(UioFiles(src_path, 'Makefile*')) sys.exit('Compilation failed!') except ValueError as e: - print 'ERROR: Makefile call failed:' - print e + print('ERROR: Makefile call failed:') + print(e) else: - subprocess.check_call(['ls', '-l', ecd + f_executable]) + subprocess.check_call(['ls', '-l', + os.path.join(src_path, + _config.FORTRAN_EXECUTABLE)]) return diff --git a/python/joboper.ksh b/python/joboper.ksh deleted file mode 100644 index 7eb5a8093c50aa26a1ba1f58eeba96f6cd669bbc..0000000000000000000000000000000000000000 --- a/python/joboper.ksh +++ /dev/null @@ -1,133 +0,0 @@ -#!/bin/ksh - -# ON ECGB: -# start with ecaccess-job-submit -queueName ecgb NAME_OF_THIS_FILE on gateway server -# start with sbatch NAME_OF_THIS_FILE directly on machine - -#SBATCH --workdir=/scratch/ms/at/km4a -#SBATCH --qos=normal -#SBATCH --job-name=flex_ecmwf -#SBATCH --output=flex_ecmwf.%j.out -#SBATCH --error=flex_ecmwf.%j.out -#SBATCH --mail-type=FAIL -#SBATCH --time=12:00:00 - -## CRAY specific batch requests -##PBS -N flex_ecmwf -##PBS -q np -##PBS -S /usr/bin/ksh -## -o /scratch/ms/spatlh00/lh0/flex_ecmwf.$PBS_JOBID.out -## job output is in .ecaccess_DO_NOT_REMOVE -##PBS -j oe -##PBS -V -##PBS -l EC_threads_per_task=24 -##PBS -l EC_memory_per_task=32000MB - -set -x -export VERSION=7.1 -case $HOST in - *ecg*) - module load python - module unload grib_api - module unload emos - module load grib_api/1.14.5 - module load emos/437-r64 - export PATH=${PATH}:${HOME}/flex_extract_v7.1/python - ;; - *cca*) - module switch PrgEnv-cray PrgEnv-intel - module load grib_api - module load emos - module load python - export SCRATCH=$TMPDIR - export PATH=${PATH}:${HOME}/flex_extract_v7.1/python - ;; -esac - -cd $SCRATCH -mkdir -p python$$ -cd python$$ - -export CONTROL=CONTROL - -cat >$CONTROL<<EOF -GATEWAY srvx8.img.univie.ac.at -DESTINATION annep@genericSftp -accuracy 16 -addpar 186 187 188 235 139 39 -area -base_time ${MSJ_BASETIME} -basetime None -controlfile CONTROL.temp -cwc 0 -date_chunk 3 -debug 1 -destination None -dpdeta 1 -dtime 3 -ecfsdir ectmp:/${USER}/econdemand/ -ecgid None -ecstorage 0 -ectrans 1 -ecuid None -end_date ${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY} -eta 0 -etadiff 0 -etapar 77 -expver 1 -format GRIB1 -gateway None -gauss 1 -grib2flexpart 0 -grid 5000 -inputdir ../work -install_target None -job_template job.temp -left -15000 -level 60 -levelist 55/to/60 -lower 30000 -mailfail ${USER} -mailops ${USER} -makefile None -marsclass EI -maxstep 11 -number OFF -omega 0 -omegadiff 0 -outputdir ../work -prefix EI -queue ecgate -resol 63 -right 45000 -smooth 0 -start_date ${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY} -step 00 01 02 03 04 05 00 07 08 09 10 11 
00 01 02 03 04 05 00 07 08 09 10 11 -stream OPER -time 00 00 00 00 00 00 06 00 00 00 00 00 12 12 12 12 12 12 18 12 12 12 12 12 -type AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC -upper 75000 -wrf 0 -EOF - - -submit.py --controlfile=$CONTROL --inputdir=./work --outputdir=./work 1> prot 2>&1 - -if [ $? -eq 0 ] ; then - l=0 - for muser in `grep -i MAILOPS $CONTROL`; do - if [ $l -gt 0 ] ; then - mail -s flex.${HOST}.$$ $muser <prot - fi - l=$(($l+1)) - done -else - l=0 - for muser in `grep -i MAILFAIL $CONTROL`; do - if [ $l -gt 0 ] ; then - mail -s "ERROR! flex.${HOST}.$$" $muser <prot - fi - l=$(($l+1)) - done -fi - diff --git a/python/prepare_flexpart.py b/python/prepare_flexpart.py index 088c2a0fd49e2cc9452d622640b8b0c137b47f07..9ea7868d570e564db9cf8b5b73a9c8bc072926b0 100755 --- a/python/prepare_flexpart.py +++ b/python/prepare_flexpart.py @@ -56,9 +56,9 @@ import os import inspect import sys import socket -import _config # software specific classes and modules from flex_extract +import _config from UioFiles import UioFiles from tools import clean_up, get_cmdline_arguments, read_ecenv from EcFlexpart import EcFlexpart @@ -70,13 +70,6 @@ try: except ImportError: ecapi = False -# add path to pythonpath so that python finds its buddies -LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( - inspect.getfile(inspect.currentframe()))) -if LOCAL_PYTHON_PATH not in sys.path: - sys.path.append(LOCAL_PYTHON_PATH) - - # ------------------------------------------------------------------------------ # FUNCTION # ------------------------------------------------------------------------------ @@ -99,19 +92,16 @@ def main(): try: c = ControlFile(args.controlfile) except IOError: - try: - c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) - except IOError: - print 'Could not read CONTROL file "' + args.controlfile + '"' - print 'Either it does not exist or its syntax is wrong.' 
- print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' - sys.exit(1) - - env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + print('Could not read CONTROL file "' + args.controlfile + '"') + print('Either it does not exist or its syntax is wrong.') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + sys.exit(1) + + env_parameter = read_ecenv(_config.PATH_ECMWF_ENV) c.assign_args_to_control(args, env_parameter) c.assign_envs_to_control(env_parameter) - c.check_conditions() + c.check_conditions(args.queue) prepare_flexpart(args.ppid, c) return @@ -169,8 +159,8 @@ def prepare_flexpart(ppid, c): if c.basetime == '00': start = start - datetime.timedelta(days=1) - print 'Prepare ' + start.strftime("%Y%m%d") + \ - "/to/" + end.strftime("%Y%m%d") + print('Prepare ' + start.strftime("%Y%m%d") + + "/to/" + end.strftime("%Y%m%d")) # create output dir if necessary if not os.path.exists(c.outputdir): @@ -181,7 +171,7 @@ def prepare_flexpart(ppid, c): # deaccumulate the flux data flexpart = EcFlexpart(c, fluxes=True) - flexpart.write_namelist(c, 'fort.4') + flexpart.write_namelist(c, _config.FILE_NAMELIST) flexpart.deacc_fluxes(inputfiles, c) # get a list of all files from the root inputdir @@ -196,7 +186,7 @@ def prepare_flexpart(ppid, c): # check if in debugging mode, then store all files # otherwise delete temporary files if int(c.debug) != 0: - print '\nTemporary files left intact' + print('\nTemporary files left intact') else: clean_up(c) diff --git a/python/profiling.py b/python/profiling.py index 4511af2aca3a41265a9dd035b11430e84626ac62..a20cad6d0c0c51b5868b045e9e56c74c80b88d11 100644 --- a/python/profiling.py +++ b/python/profiling.py @@ -65,7 +65,7 @@ def timefn(fn): t1 = time.time() result = fn(*args, **kwargs) t2 = time.time() - print "@timefn:" + fn.func_name + " took " + str(t2 - t1) + " seconds" + print("@timefn:" + fn.func_name + " took " + str(t2 - t1) + " seconds") return result diff --git a/python/pythontest/.pytest_cache/README.md b/python/pythontest/.pytest_cache/README.md new file mode 100644 index 0000000000000000000000000000000000000000..bb78ba07ee538a8e56ecaa7e22910d963ef2b634 --- /dev/null +++ b/python/pythontest/.pytest_cache/README.md @@ -0,0 +1,8 @@ +# pytest cache directory # + +This directory contains data from the pytest's cache plugin, +which provides the `--lf` and `--ff` options, as well as the `cache` fixture. + +**Do not** commit this to version control. + +See [the docs](https://docs.pytest.org/en/latest/cache.html) for more information. 
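# Note on the profiling.py hunk above: the print statement was converted to a
# function call, but fn.func_name exists only under Python 2; Python 3 exposes
# the same information as fn.__name__. Below is a minimal, illustrative sketch
# of a timing decorator that runs under both interpreters. The decorator body
# (t1/t2 around the call and the "@timefn:" message) mirrors profiling.py; the
# functools.wraps usage and the wrapper name measure_time are assumptions made
# for this sketch, not taken from the repository.
import time
import functools

def timefn(fn):
    # preserve the wrapped function's name and docstring for introspection
    @functools.wraps(fn)
    def measure_time(*args, **kwargs):
        t1 = time.time()
        result = fn(*args, **kwargs)
        t2 = time.time()
        # fn.__name__ is valid in Python 2 and 3; fn.func_name is Python 2 only
        print("@timefn: " + fn.__name__ + " took " + str(t2 - t1) + " seconds")
        return result
    return measure_time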
diff --git a/python/pythontest/.pytest_cache/v/cache/lastfailed b/python/pythontest/.pytest_cache/v/cache/lastfailed new file mode 100644 index 0000000000000000000000000000000000000000..1dc15479252a154024f66d578cefdbb290f20f14 --- /dev/null +++ b/python/pythontest/.pytest_cache/v/cache/lastfailed @@ -0,0 +1,3 @@ +{ + "TestInstall.py::TestTools::test_un_tarball": true +} \ No newline at end of file diff --git a/python/pythontest/.pytest_cache/v/cache/nodeids b/python/pythontest/.pytest_cache/v/cache/nodeids new file mode 100644 index 0000000000000000000000000000000000000000..cfcdd4d958ad90f5380b871042448e4dd2bcd78b --- /dev/null +++ b/python/pythontest/.pytest_cache/v/cache/nodeids @@ -0,0 +1,4 @@ +[ + "TestInstall.py::TestTools::test_mk_tarball", + "TestInstall.py::TestTools::test_un_tarball" +] \ No newline at end of file diff --git a/python/pythontest/TestInstall.py b/python/pythontest/TestInstall.py index 0bc236af4e78d65a5beb19862bdda5574b73aa90..deff52278cf38c3dfe3e5a882e5788b59eceaae7 100644 --- a/python/pythontest/TestInstall.py +++ b/python/pythontest/TestInstall.py @@ -5,8 +5,10 @@ import unittest import sys import os import inspect -sys.path.append('../python') +sys.path.append('../') +import _config import install +from tools import make_dir class TestTools(unittest.TestCase): @@ -16,14 +18,79 @@ class TestTools(unittest.TestCase): def setUp(self): pass + # - main + # - get_install_cmdline_arguments + # - install_via_gateway + #! - mk_tarball + #! - un_tarball + # - mk_env_vars + # - mk_compilejob + # - mk_job_template + # - delete_convert_build + # - make_convert_build def test_mk_tarball(self): - ecd = os.path.dirname(os.path.abspath(inspect.getfile( - inspect.currentframe()))) + '/../' - #print ecd - install.mk_tarball(ecd) + import tarfile + ecd = _config.PATH_FLEXEXTRACT_DIR + os.path.sep + + # list comparison files for tarball content + tar_test_dir = os.path.join(_config.PATH_TEST_DIR + + os.path.sep + 'TestInstallTar') + tar_test_fedir = os.path.join(tar_test_dir, 'flex_extract_v7.1') + + comparison_list = [] + for path, subdirs, files in os.walk(tar_test_fedir): + for name in files: + if 'tar' not in name: + comparison_list.append(os.path.relpath(os.path.join(path, name), tar_test_fedir)) + + # create test tarball and list its content files + tarballname = _config.FLEXEXTRACT_DIRNAME + '_test.tar' + install.mk_tarball(ecd + tarballname) + with tarfile.open(ecd + tarballname, 'r') as tar_handle: + tar_content_list = tar_handle.getnames() + + # remove test tar file from flex_extract directory + os.remove(ecd + tarballname) + + # test if comparison filelist is equal to the + # filelist of tarball content + assert sorted(comparison_list) == sorted(tar_content_list) + + def test_un_tarball(self): + import tarfile + import shutil + + ecd = _config.PATH_FLEXEXTRACT_DIR + os.path.sep + + # list comparison files for tarball content + tar_test_dir = os.path.join(_config.PATH_TEST_DIR + + os.path.sep + 'TestInstallTar') + tar_test_fedir = os.path.join(tar_test_dir, 'flex_extract_v7.1') + comparison_list = [] + for path, subdirs, files in os.walk(tar_test_fedir): + for name in files: + if 'tar' not in name: + comparison_list.append(os.path.relpath(os.path.join(path, name), tar_test_fedir)) + + # untar in test directory + test_dir = os.path.join(tar_test_dir, 'test_untar') + make_dir(test_dir) + os.chdir(test_dir) + tarballname = _config.FLEXEXTRACT_DIRNAME + '.tar' + install.un_tarball(os.path.join(tar_test_dir, tarballname)) + tarfiles_list = [] + for path, subdirs, files in 
os.walk(test_dir): + for name in files: + tarfiles_list.append(os.path.relpath(os.path.join(path, name), test_dir)) + + # test for equality + assert sorted(tarfiles_list) == sorted(comparison_list) + + # clean up temp test dir + shutil.rmtree(test_dir) if __name__ == "__main__": - unittest.main() \ No newline at end of file + unittest.main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/compilejob.temp b/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/compilejob.temp new file mode 100644 index 0000000000000000000000000000000000000000..715308b3d361e2b0f5c088b344ecde539d74243e --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/compilejob.temp @@ -0,0 +1,77 @@ +#!/bin/ksh + +# ON ECGB: +# start with ecaccess-job-submit -queueName ecgb NAME_OF_THIS_FILE on gateway server +# start with sbatch NAME_OF_THIS_FILE directly on machine + +#SBATCH --workdir=/scratch/ms/spatlh00/lh0 +#SBATCH --qos=normal +#SBATCH --job-name=flex_ecmwf +#SBATCH --output=flex_ecmwf.%j.out +#SBATCH --error=flex_ecmwf.%j.out +#SBATCH --mail-type=FAIL +#SBATCH --time=12:00:00 + +## CRAY specific batch requests +##PBS -N flex_ecmwf +##PBS -q ns +##PBS -S /usr/bin/ksh +# -o /scratch/ms/no/sbc/flex_ecmwf.$Jobname.$Job_ID.out +# job output is in .ecaccess_DO_NOT_REMOVE +##PBS -j oe +##PBS -V +##PBS -l EC_threads_per_task=1 +##PBS -l EC_memory_per_task=3200MB + +set -x +export VERSION=7.1 +case $HOST in + *ecg*) + module load python + module unload grib_api + module unload emos + module load grib_api/1.14.5 + module load emos/437-r64 + export FLEXPART_ROOT_SCRIPTS= + export MAKEFILE=Makefile.gfortran + ;; + *cca*) + module switch PrgEnv-cray PrgEnv-intel + module load grib_api + module load emos + module load python + echo ${GROUP} + echo ${HOME} + echo $HOME | awk -F / '{print $1, $2, $3, $4}' + export GROUP=`echo $HOME | awk -F / '{print $4}'` + export SCRATCH=/scratch/ms/${GROUP}/${USER} + export FLEXPART_ROOT_SCRIPTS= + export MAKEFILE=Makefile.CRAY + ;; +esac + +mkdir -p $FLEXPART_ROOT_SCRIPTS/flex_extract_v$VERSION +cd $FLEXPART_ROOT_SCRIPTS/flex_extract_v$VERSION # if FLEXPART_ROOT is not set this means cd to the home directory +tar -xvf $HOME/flex_extract_v$VERSION.tar +cd src +\rm *.o *.mod CONVERT2 +make -f $MAKEFILE >flexcompile 2>flexcompile + +ls -l CONVERT2 >>flexcompile +if [ $? -eq 0 ]; then + echo 'SUCCESS!' >>flexcompile + mail -s flexcompile.$HOST.$$ $USER <flexcompile +else + echo Environment: >>flexcompile + env >> flexcompile + mail -s "ERROR! flexcompile.$HOST.$$" $USER <flexcompile +fi + + + + + + + + + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/ecmwf_grib1_table_128 b/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/ecmwf_grib1_table_128 new file mode 100644 index 0000000000000000000000000000000000000000..b14d7afb69fd6f687840d2c406698f56f3d696d8 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/_templates/ecmwf_grib1_table_128 @@ -0,0 +1,197 @@ +! +! ECMWFGRIB128.TBL -- GRIB 2 parameter conversion table version 128 +! +!ID# NAME UNITS GNAM SCALE MISSING +! +001 Stream function m**2 s**-1 STRF 0 -9999.00 +002 Velocity potential m**2 s**-1 VPOT 0 -9999.00 +003 Potential temperature K THTA 0 -9999.00 +004 Equivalent potential temperature K THTE 0 -9999.00 +005 Saturated eq. pot. 
temperature K STHE 0 -9999.00 +!006-010 Reserved for Metview +011 U component of divergent wind m s**-1 UDVW 0 -9999.00 +012 V component of divergent wind m s**-1 VDVW 0 -9999.00 +013 U component of rotational wind m s**-1 URTW 0 -9999.00 +014 V component of rotational wind m s**-1 VRTW 0 -9999.00 +!015-020 Reserved for Metview +021 Unbalanced component of temp. K UCTP 0 -9999.00 +022 Unbal. comp. of log surface pres ln(Pa) UCLN 0 -9999.00 +023 Unbal. comp. of divergence s**-1 UCDV 0 -9999.00 +024 Reserved for future unbal. comp. - X 0 -9999.00 +023 Reserved for future unbal. comp. - X 0 -9999.00 +026 Lake cover (0-1) fraction CL 0 -9999.00 +027 Low vegetation cover (0-1) fraction CVL 0 -9999.00 +028 High vegetation cover (0-1) fraction CVH 0 -9999.00 +029 Type of low vegetation type TVL 0 -9999.00 +030 Type of high vegetation type TVH 0 -9999.00 +031 Sea-ice cover (0-1) fraction CI 0 -9999.00 +032 Snow albedo (0-1) fraction ASN 0 -9999.00 +033 Snow density kg m**-3 RSN 0 -9999.00 +034 Sea surface temperature K SST 0 -9999.00 +035 Ice surface temperature layer 1 K ISTL1 0 -9999.00 +036 Ice surface temperature layer 2 K ISTL2 0 -9999.00 +037 Ice surface temperature layer 3 K ISTL3 0 -9999.00 +038 Ice surface temperature layer 4 K ISTL4 0 -9999.00 +039 Volumetric soil water layer 1 m**3 m**3 SWVL1 0 -9999.00 +040 Volumetric soil water layer 2 m**3 m**3 SWVL2 0 -9999.00 +041 Volumetric soil water layer 3 m**3 m**3 SWVL3 0 -9999.00 +042 Volumetric soil water layer 4 m**3 m**3 SWVL4 0 -9999.00 +043 Soil type type SLT 0 -9999.00 +044 Snow evaporation m (H2O) ES 0 -9999.00 +045 Snowmelt m (H2) SMLT 0 -9999.00 +046 Solar duration s SDUR 0 -9999.00 +047 Direct solar radiation W m**-2 DSRP 0 -9999.00 +048 Magnitude of surface stress N m**-2 s MAGSS 0 -9999.00 +049 10 metre wind gust m s**-1 10FG 0 -9999.00 +050 Large-scale precip. fraction s SLPF 0 -9999.00 +051 Maximum 2 metre temperature K MX2T24 0 -9999.00 +052 Minimum 2 metre temperature K MN2T24 0 -9999.00 +053 Montgomery potential m**2 s**-2 MONT 0 -9999.00 +054 Pressure Pa PRES 0 -9999.00 +055 Mean 2m temp.in past 24 hours K MN2T24 0 -9999.00 +056 Mean 2m dewpt. temp. in past 24h K MN2D24 0 -9999.00 +057 Downward UV radiation at sfc. W m**-2 s UVB 0 -9999.00 +058 Photo. active rad. at sfc. W m**-2 s PAR 0 -9999.00 +059 Convective available pot. energy J kg**-1 CAPE 0 -9999.00 +060 Potential vorticity K m**2 kg**-1 s**-1 PVOR 0 -9999.00 +061 Total precipitation from obs. mm*100+Nobs TPO 0 -9999.00 +062 Observation count count OBCT 0 -9999.00 +063 Start time for skin temp. diff. s TSDIFS 0 -9999.00 +064 Finish time for skin temp. diff. s TSDIFE 0 -9999.00 +065 Skin temperature difference K TSDIF 0 -9999.00 +!66 to 77 Unused +078 Total column liquid water kg m**-2 TCLW 0 -9999.00 +079 Total column ice water kg m**-2 TCIW 0 -9999.00 +!80 to 120 Experimental products (contents may vary) +!121 to 124 Unused +125 Vert. integrated tot. energy J m**-2 COLENR 0 -9999.00 +126 Param. for sensitive area pred. 
- SENPRM 0 -9999.00 +127 Atmospheric tide - AT 0 -9999.00 +128 Budget values - BV 0 -9999.00 +129 Geopotential m**2 s**-2 Z 0 -9999.00 +130 Temperature K T 0 -9999.00 +131 U velocity m s**-1 U 0 -9999.00 +132 V velocity m s**-1 V 0 -9999.00 +133 Specific humidity kg (H2O) kg**1 Q 0 -9999.00 +134 Surface pressure Pa SP -2 -9999.00 +135 Vertical velocity Pa s**-1 W -2 -9999.00 +136 Total column water kg m**-2 TCW 0 -9999.00 +137 Total column water vapour kg m**-2 TCWV 0 -9999.00 +138 Vorticity (relative) s**-1 VO 0 -9999.00 +139 Soil temperature level 1 K STL1 0 -9999.00 +140 Soil wetness level 1 m (H2O) SWL1 0 -9999.00 +141 Snow depth m (H2O) SD 0 -9999.00 +142 Large scale precipitation m LSP 0 -9999.00 +143 Convective precipitation m CP 0 -9999.00 +144 Snowfall (conv. + strat.) m (H2O) SF 0 -9999.00 +145 Boundary layer dissipation W m**-2 s BLD 0 -9999.00 +146 Surface sensible heat flux W m**-2 s SSHF 0 -9999.00 +147 Surface latent heat flux W m**-2 s SLHF 0 -9999.00 +148 Charnock - CHNK 0 -9999.00 +149 Surface net radiation W m**-2 s SNR 0 -9999.00 +150 Top net radiation W m**-2 s TNR 0 -9999.00 +151 Mean sea level pressure Pa MSLP 0 -9999.00 +152 Logarithm of surface pressure ln(Pa) LNSP 0 -9999.00 +153 Short-wave heating rate K SWHR 0 -9999.00 +154 Long-wave heating rate K LWHR 0 -9999.00 +155 Divergence s**-1 D 0 -9999.00 +156 Height m HGHT 0 -9999.00 +157 Relative humidity % RELH 0 -9999.00 +158 Tendency of surface pressure Pa s**-1 PTND -2 -9999.00 +159 Boundary layer height m ZPBL 0 -9999.00 +160 Standard deviation of orography m SDOR 0 -9999.00 +161 Anisotropy of sub-gridscale oro. - ISOR 0 -9999.00 +162 Angle of sub-gridscale orography rad ANOR 0 -9999.00 +163 Slope of sub-gridscale orography - SLOR 0 -9999.00 +164 Total cloud cover (0-1) fraction TCC 0 -9999.00 +165 10 metre U wind component m s**-1 10U 0 -9999.00 +166 10 metre V wind component m s**-1 10V 0 -9999.00 +167 2 metre temperature K 2T 0 -9999.00 +168 2 metre dewpoint temperature K 2D 0 -9999.00 +169 Surface solar radiation downwards W m**-2 s SSRD 0 -9999.00 +170 Soil temperature level 2 K STL2 0 -9999.00 +171 Soil wetness level 2 m (H2O) SWL2 0 -9999.00 +172 Land-sea mask (0,1) logical LSM 0 -9999.00 +173 Surface roughness m SR 0 -9999.00 +174 Albedo (0-1) fraction ALBD 0 -9999.00 +175 Surface thermal radiation down W m**-2 s STRD 0 -9999.00 +176 Surface solar radiation W m**-2 s SSR 0 -9999.00 +177 Surface thermal radiation W m**-2 s STR 0 -9999.00 +178 Top solar radiation W m**-2 s TSR 0 -9999.00 +179 Top thermal radiation W m**-2 s TTR 0 -9999.00 +180 East-West surface stress N m**-2 s EWSS 0 -9999.00 +181 North-South surface stress N m**-2 s NSSS 0 -9999.00 +182 Evaporation m (H2O) EVAP 0 -9999.00 +183 Soil temperature level 3 K STL3 0 -9999.00 +184 Soil wetness level 3 m (H2O) SWL3 0 -9999.00 +185 Convective cloud cover (0-1) fraction CCC 0 -9999.00 +186 Low cloud cover (0-1) fraction LCC 0 -9999.00 +187 Medium cloud cover (0-1) fraction MCC 0 -9999.00 +188 High cloud cover (0-1) fraction HCC 0 -9999.00 +189 Sunshine duration s SUND 0 -9999.00 +190 E-W comp. of subgrid oro. var. m**2 EWOV 0 -9999.00 +191 N-S comp. of subgrid oro. var. m**2 NSOV 0 -9999.00 +192 NW-SE comp. of subgrid oro. var. m**2 NWOV 0 -9999.00 +193 NE-SW comp. of subgrid oro. var. m**2 NEOV 0 -9999.00 +194 Brightness temperature K BTMP 0 -9999.00 +195 Lat. comp. of gravity wave stress N m**-2 s LGWS 0 -9999.00 +196 Mer. comp. 
of gravity wave stress N m**-2 s MGWS 0 -9999.00 +197 Gravity wave dissipation W m**-2 s GWD 0 -9999.00 +198 Skin reservoir content m (H2O) SRC 0 -9999.00 +199 Vegetation fraction (0-1) fraction VEG 0 -9999.00 +200 Variance of subgrid orography m**2 VSO 0 -9999.00 +201 Max. 2m temp. since post-process. K MX2T 0 -9999.00 +202 Min. 2m temp. since post-process. K MN2T 0 -9999.00 +203 Ozone mass mixing ratio kg (O3) kg**-1 OZMR 0 -9999.00 +204 Precipiation analysis weights - PAW 0 -9999.00 +205 Runoff m RO 0 -9999.00 +206 Total column ozone kg m**-2 TOZO 0 -9999.00 +207 10 metre wind speed m s**-1 10SI 0 -9999.00 +208 Top net solar rad., clear sky W m**-2 s TSRC 0 -9999.00 +209 Top net thermal rad., clear sky W m**-2 s TTRC 0 -9999.00 +210 Surface net solar rad., clear sky W m**-2 s SSRC 0 -9999.00 +211 Sfc. net thermal rad., clear sky W m**-2 s STRC 0 -9999.00 +212 Solar insolation W m**-2 s SI 0 -9999.00 +213 Unused +214 Diabatic heating by radiation K DHR 0 -9999.00 +215 Diab. heating by vert. diffusion K DHVD 0 -9999.00 +216 Diab. heating by cumulus convec. K DHCC 0 -9999.00 +217 Diab. heating resolved conden. K DHLC 0 -9999.00 +218 Vertical diffusion of zonal wind m s**-1 VDZW 0 -9999.00 +219 Vertical diffusion of mer.. wind m s**-1 VDMW 0 -9999.00 +220 E-W gravity wave drag tendency m s**-1 EWGD 0 -9999.00 +221 N-S gravity wave drag tendency m s**-1 NSGD 0 -9999.00 +222 Convective tendency of zonal wind m s**-1 CTZW 0 -9999.00 +223 Convective tendency of mer. wind m s**-1 CTMW 0 -9999.00 +224 Vertical diffusion of humidity kg (H2O) kg**-1 VDH 0 -9999.00 +225 Humid. tend. by cumulus convec. kg (H2O) kg**-1 HTCC 0 -9999.00 +226 Humid. tend. by resolved conden. kg (H2O) kg**-1 HTLC 0 -9999.00 +227 Change from removing neg. humid. kg (H2O) kg**-1 CRNH 0 -9999.00 +228 Total precipitation m P--M 0 -9999.00 +229 Instantaneous X surface stress N m**-2 IEWS 0 -9999.00 +230 Instantaneous Y surface stress N m**-2 INSS 0 -9999.00 +231 Instantaneous surface heat flux W m**-2 ISHF 0 -9999.00 +232 Instantaneous moisture flux kg m**-2 s IE 0 -9999.00 +233 Apparent surface humidity kg (H2O) kg**-1 ASQ 0 -9999.00 +234 Log of sfc. rough. length (heat) ln(m) LSRH 0 -9999.00 +235 Skin temperature K SKT 0 -9999.00 +236 Soil temperature level 4 K STL4 0 -9999.00 +237 Soil wetness level 4 m (H2O) SWL4 0 -9999.00 +238 Temperature of snow layer K TSN 0 -9999.00 +239 Convective snowfall m (H2O) CSF 0 -9999.00 +240 Large-scale snowfall m (H2O) LSF 0 -9999.00 +241 Accum. cloud frac. tend. (-1 - 1) fraction ACF 0 -9999.00 +242 Accum liquid water tend. (-1 - 1) fraction ALW 0 -9999.00 +243 Forecast albedo (0-1) fraction FAL 0 -9999.00 +244 Forecast surface roughness m FSR 0 -9999.00 +245 Fcst. log of src. rough. (heat) log(m) FLSR 0 -9999.00 +246 Cloud liquid water content kg (H2O) kg**-1 CLWC 0 -9999.00 +247 Cloud ice water content kg kg**-1 CIWC 0 -9999.00 +248 Cloud cover (0-1) fraction CC 0 -9999.00 +249 Accum. ice water tend. (-1 - 1) fraction AIW 0 -9999.00 +250 Ice age (1,0) 0-first 1-multi logical ICE 0 -9999.00 +251 Adiabatic tendency of temperature K ATTE 0 -9999.00 +252 Adiabatic tendency of humidity kg (H2O) kg**-1 ATHE 0 -9999.00 +253 Adiabatic tendency of zonal wind m s**-1 ATZE 0 -9999.00 +254 Adiabatic tendency of mer. 
wind m s**-1 ATMW 0 -9999.00 +255 Indicates a missing value - MISS 0 -9999.00 \ No newline at end of file diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ControlFile.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ControlFile.py new file mode 100644 index 0000000000000000000000000000000000000000..59a4752c3a28c5b087528b3ce9bfc49787b215c9 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ControlFile.py @@ -0,0 +1,534 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - applied some minor modifications in programming style/structure +# - changed name of class Control to ControlFile for more +# self-explanation naming +# - outsource of class ControlFile +# - initialisation of class attributes ( to avoid high number of +# conditional statements and set default values ) +# - divided assignment of attributes and the check of conditions +# - outsourced the commandline argument assignments to control attributes +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# The CONTROL file is the steering part of the FLEXPART extraction +# software. All necessary parameters needed to retrieve the data fields +# from the MARS archive for driving FLEXPART are set in a CONTROL file. +# Some specific parameters like the start and end dates can be overwritten +# by the command line parameters, but in generel all parameters needed +# for a complete set of fields for FLEXPART can be set in the CONTROL file. +# +# @Class Content: +# - __init__ +# - __read_controlfile__ +# - __str__ +# - assign_args_to_control +# - assign_envs_to_control +# - check_conditions +# - check_install_conditions +# - to_list +# +# @Class Attributes: +# +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import re +import sys +import inspect + +import _config + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class ControlFile(object): + ''' + Class containing the information of the flex_extract CONTROL file. + + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, + BASETIME, DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + ''' + + def __init__(self, filename): + ''' + @Description: + Initialises the instance of ControlFile class and defines + all class attributes with default values. 
Afterwards calls + function __read_controlfile__ to read parameter from + Control file. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + filename: string + Name of CONTROL file. + + @Return: + <nothing> + ''' + + # list of all possible class attributes and their default values + self.controlfile = filename + self.start_date = None + self.end_date = None + self.date_chunk = 3 + self.dtime = None + self.basetime = None + self.maxstep = None + self.type = None + self.time = None + self.step = None + self.marsclass = None + self.stream = None + self.number = 'OFF' + self.expver = None + self.grid = None + self.area = '' + self.left = None + self.lower = None + self.upper = None + self.right = None + self.level = None + self.levelist = None + self.resol = None + self.gauss = 0 + self.accuracy = 24 + self.omega = 0 + self.omegadiff = 0 + self.eta = 0 + self.etadiff = 0 + self.etapar = 77 + self.dpdeta = 1 + self.smooth = 0 + self.format = 'GRIB1' + self.addpar = None + self.prefix = 'EN' + self.cwc = 0 + self.wrf = 0 + self.ecfsdir = 'ectmp:/${USER}/econdemand/' + self.mailfail = ['${USER}'] + self.mailops = ['${USER}'] + self.grib2flexpart = 0 + self.ecstorage = 0 + self.ectrans = 0 + self.inputdir = '../work' + self.outputdir = self.inputdir + self.ecmwfdatadir = None + self.exedir = None + self.flexpart_root_scripts = None + self.makefile = None + self.destination = None + self.gateway = None + self.ecuid = None + self.ecgid = None + self.install_target = None + self.debug = 0 + self.request = 0 + + self.__read_controlfile__() + + return + + def __read_controlfile__(self): + ''' + @Description: + Read CONTROL file and assign all CONTROL file variables. + + @Input: + self: instance of ControlFile class + Description see class documentation. 
+ + @Return: + <nothing> + ''' + from tools import my_error + + # read whole CONTROL file + with open(self.controlfile) as f: + fdata = f.read().split('\n') + + # go through every line and store parameter + for ldata in fdata: + data = ldata.split() + if len(data) > 1: + if 'm_' in data[0].lower(): + data[0] = data[0][2:] + if data[0].lower() == 'class': + data[0] = 'marsclass' + if data[0].lower() == 'day1': + data[0] = 'start_date' + if data[0].lower() == 'day2': + data[0] = 'end_date' + if data[0].lower() == 'addpar': + if '/' in data[1]: + # remove leading '/' sign from addpar content + if data[1][0] == '/': + data[1] = data[1][1:] + dd = data[1].split('/') + data = [data[0]] + for d in dd: + data.append(d) + if len(data) == 2: + if '$' in data[1]: + setattr(self, data[0].lower(), data[1]) + while '$' in data[1]: + i = data[1].index('$') + j = data[1].find('{') + k = data[1].find('}') + var = os.getenv(data[1][j+1:k]) + if var is not None: + data[1] = data[1][:i] + var + data[1][k+1:] + else: + my_error(self.mailfail, + 'Could not find variable ' + + data[1][j+1:k] + ' while reading ' + + self.controlfile) + setattr(self, data[0].lower() + '_expanded', data[1]) + else: + if data[1].lower() != 'none': + setattr(self, data[0].lower(), data[1]) + else: + setattr(self, data[0].lower(), None) + elif len(data) > 2: + setattr(self, data[0].lower(), (data[1:])) + else: + pass + + # script directory + self.ecmwfdatadir = os.path.dirname(os.path.abspath(inspect.getfile( + inspect.currentframe()))) + '/../' + + # Fortran source directory + self.exedir = self.ecmwfdatadir + 'src/' + + return + + def __str__(self): + ''' + @Description: + Prepares a string which have all the ControlFile + class attributes with its associated values. + Each attribute is printed in one line and in + alphabetical order. + + Example: + 'age': 10 + 'color': 'Spotted' + 'kids': 0 + 'legs': 2 + 'name': 'Dog' + 'smell': 'Alot' + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + string of ControlFile class attributes with their values + ''' + import collections + + attrs = vars(self) + attrs = collections.OrderedDict(sorted(attrs.items())) + + return '\n'.join("%s: %s" % item for item in attrs.items()) + + def assign_args_to_control(self, args): + ''' + @Description: + Overwrites the existing ControlFile instance attributes with + the command line arguments. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + + @Return: + <nothing> + ''' + + # get dictionary of command line parameters and eliminate all + # parameters which are None (were not specified) + args_dict = vars(args) + arguments = {k : args_dict[k] for k in args_dict + if args_dict[k] != None} + + # assign all passed command line arguments to ControlFile instance + for k, v in arguments.iteritems(): + setattr(self, str(k), v) + + return + + def assign_envs_to_control(self, envs): + ''' + @Description: + Assigns the ECMWF environment parameter. + + @Input: + envs: dict of strings + Contains the ECMWF environment parameternames "ECUID", "ECGID", + "DESTINATION" and "GATEWAY" with its corresponding values. + They were read from the file "ECMWF_ENV". 
+ + @Return: + <nothing> + ''' + + for k, v in envs.iteritems(): + setattr(self, str(k).lower(), str(v)) + + return + + def check_conditions(self): + ''' + @Description: + Checks a couple of necessary attributes and conditions, + such as if they exist and contain values. + Otherwise set default values. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + <nothing> + ''' + from tools import my_error + import numpy as np + + # check for having at least a starting date + # otherwise program is not allowed to run + if self.start_date is None: + print 'start_date specified neither in command line nor ' + \ + 'in CONTROL file ' + self.controlfile + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + # retrieve just one day if end_date isn't set + if self.end_date is None: + self.end_date = self.start_date + + # assure consistency of levelist and level + if self.levelist is None: + if self.level is None: + print 'Warning: neither levelist nor level ' + \ + 'specified in CONTROL file' + sys.exit(1) + else: + self.levelist = '1/to/' + self.level + else: + if 'to' in self.levelist.lower(): + self.level = self.levelist.split('/')[2] + else: + self.level = self.levelist.split('/')[-1] + + # if area was provided at command line + # decompse area into its 4 components + if self.area: + afloat = '.' in self.area + l = self.area.split('/') + if afloat: + for i, item in enumerate(l): + item = str(int(float(item) * 1000)) + self.upper, self.left, self.lower, self.right = l + + # prepare step for correct usage + if '/' in self.step: + l = self.step.split('/') + if 'to' in self.step.lower(): + if 'by' in self.step.lower(): + ilist = np.arange(int(l[0]), int(l[2]) + 1, int(l[4])) + self.step = ['{:0>3}'.format(i) for i in ilist] + else: + my_error(self.mailfail, self.step + ':\n' + + 'if "to" is used, please use "by" as well') + else: + self.step = l + + # if maxstep wasn't provided + # search for it in the "step" parameter + if self.maxstep is None: + self.maxstep = 0 + for s in self.step: + if int(s) > self.maxstep: + self.maxstep = int(s) + else: + self.maxstep = int(self.maxstep) + + # set root scripts since it is needed later on + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = self.ecmwfdatadir + + if not isinstance(self.mailfail, list): + if ',' in self.mailfail: + self.mailfail = self.mailfail.split(',') + elif ' ' in self.mailfail: + self.mailfail = self.mailfail.split() + else: + self.mailfail = [self.mailfail] + + if not isinstance(self.mailops, list): + if ',' in self.mailops: + self.mailops = self.mailops.split(',') + elif ' ' in self.mailops: + self.mailops = self.mailops.split() + else: + self.mailops = [self.mailops] + + if not self.gateway or not self.destination or \ + not self.ecuid or not self.ecgid: + print '\nEnvironment variables GATWAY, DESTINATION, ECUID and ' + \ + 'ECGID were not set properly!' + print 'Please check for excistence of file "ECMWF_ENV" in the ' + \ + 'python directory!' 
+ sys.exit(1) + + if self.request != 0: + marsfile = os.path.join(_config.PATH_RUN_DIR + os.path.sep + + _config.FILE_MARS_REQUESTS) + if os.path.isfile(marsfile): + os.remove(marsfile) + + # check logical variables for data type + # if its a string change to integer + logicals = ['gauss', 'omega', 'omegadiff', 'eta', 'etadiff', + 'dpdeta', 'cwc', 'wrf', 'grib2flexpart', 'ecstorage', + 'ectrans', 'debug', 'request'] + + for var in logicals: + if not isinstance(getattr(self, var), int): + setattr(self, var, int(getattr(self, var))) + + return + + def check_install_conditions(self): + ''' + @Description: + Checks a couple of necessary attributes and conditions + for the installation such as if they exist and contain values. + Otherwise set default values. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + <nothing> + ''' + + if self.install_target and \ + self.install_target not in ['local', 'ecgate', 'cca']: + print('ERROR: unknown or missing installation target ') + print('target: ', self.install_target) + print('please specify correct installation target ' + + '(local | ecgate | cca)') + print('use -h or --help for help') + sys.exit(1) + + if self.install_target and self.install_target != 'local': + if not self.ecgid or not self.ecuid or \ + not self.gateway or not self.destination: + print('Please enter your ECMWF user id and group id as well ' + + 'as the \nname of the local gateway and the ectrans ' + + 'destination ') + print('with command line options --ecuid --ecgid \ + --gateway --destination') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + print('Please consult ecaccess documentation or ECMWF user \ + support for further details') + sys.exit(1) + + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = '${HOME}' + else: + self.flexpart_root_scripts = self.flexpart_root_scripts + else: # local + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = '../' + + if not self.makefile: + self.makefile = 'Makefile.gfortran' + + return + + def to_list(self): + ''' + @Description: + Just generates a list of strings containing the attributes and + assigned values except the attributes "_expanded", "exedir", + "ecmwfdatadir" and "flexpart_root_scripts". + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + l: list + A sorted list of the all ControlFile class attributes with + their values except the attributes "_expanded", "exedir", + "ecmwfdatadir" and "flexpart_root_scripts". 
+ ''' + + import collections + + attrs = collections.OrderedDict(sorted(vars(self).items())) + + l = list() + + for item in attrs.items(): + if '_expanded' in item[0]: + pass + elif 'exedir' in item[0]: + pass + elif 'flexpart_root_scripts' in item[0]: + pass + elif 'ecmwfdatadir' in item[0]: + pass + else: + if isinstance(item[1], list): + stot = '' + for s in item[1]: + stot += s + ' ' + + l.append("%s %s" % (item[0], stot)) + else: + l.append("%s %s" % item) + + return sorted(l) + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ECMWF_ENV b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ECMWF_ENV new file mode 100644 index 0000000000000000000000000000000000000000..5af57721445813f4a44c0b9562b8219196daeda9 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/ECMWF_ENV @@ -0,0 +1,4 @@ +ECUID km4a +ECGID at +GATEWAY srvx8.img.univie.ac.at +DESTINATION annep@genericSftp diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/EcFlexpart.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/EcFlexpart.py new file mode 100644 index 0000000000000000000000000000000000000000..57b2da630d336ba16c58042ab2c67a64b0c28e84 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/EcFlexpart.py @@ -0,0 +1,1314 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - extended with class Control +# - removed functions mkdir_p, daterange, years_between, months_between +# - added functions darain, dapoly, to_param_id, init128, normal_exit, +# my_error, clean_up, install_args_and_control, +# interpret_args_and_control, +# - removed function __del__ in class EIFLexpart +# - added the following functions in EIFlexpart: +# - create_namelist +# - process_output +# - deacc_fluxes +# - modified existing EIFlexpart - functions for the use in +# flex_extract +# - retrieve also longer term forecasts, not only analyses and +# short term forecast data +# - added conversion into GRIB2 +# - added conversion into .fp format for faster execution of FLEXPART +# (see https://www.flexpart.eu/wiki/FpCtbtoWo4FpFormat) +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - removed function getFlexpartTime in class EcFlexpart +# - outsourced class ControlFile +# - outsourced class MarsRetrieval +# - changed class name from EIFlexpart to EcFlexpart +# - applied minor code changes (style) +# - removed "dead code" , e.g. retrieval of Q since it is not needed +# - removed "times" parameter from retrieve-method since it is not used +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# FLEXPART needs grib files in a specifc format. All necessary data fields +# for one time step are stored in a single file. The class represents an +# instance with all the parameter and settings necessary for retrieving +# MARS data and modifing them so they are fitting FLEXPART need. The class +# is able to disaggregate the fluxes and convert grid types to the one needed +# by FLEXPART, therefore using the FORTRAN program. 
+# +# @Class Content: +# - __init__ +# - write_namelist +# - retrieve +# - process_output +# - create +# - deacc_fluxes +# +# @Class Attributes: +# - dtime +# - basetime +# - server +# - marsclass +# - stream +# - resol +# - accuracy +# - number +# - expver +# - glevelist +# - area +# - grid +# - level +# - levelist +# - types +# - dates +# - area +# - gaussian +# - params +# - inputdir +# - outputfilelist +# +#******************************************************************************* +#pylint: disable=unsupported-assignment-operation +# this is disabled because its an error in pylint for this specific case +#pylint: disable=consider-using-enumerate +# this is not useful in this case +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import subprocess +import shutil +import os +import glob +from datetime import datetime, timedelta +import numpy as np +from gribapi import grib_set, grib_index_select, grib_new_from_index, grib_get,\ + grib_write, grib_get_values, grib_set_values, grib_release,\ + grib_index_release, grib_index_get + +# software specific classes and modules from flex_extract +import _config +from GribTools import GribTools +from tools import init128, to_param_id, silent_remove, product, my_error +from MarsRetrieval import MarsRetrieval +import disaggregation + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class EcFlexpart(object): + ''' + Class to retrieve FLEXPART specific ECMWF data. + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, c, fluxes=False): + ''' + @Description: + Creates an object/instance of EcFlexpart with the + associated settings of its attributes for the retrieval. + + @Input: + self: instance of EcFlexpart + The current object of the class. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + fluxes: boolean, optional + Decides if the flux parameter settings are stored or + the rest of the parameter list. + Default value is False. + + @Return: + <nothing> + ''' + + # different mars types for retrieving data for flexpart + self.types = dict() + + if c.maxstep > len(c.type): # Pure forecast mode + c.type = [c.type[1]] + c.step = ['{:0>3}'.format(int(c.step[0]))] + c.time = [c.time[0]] + for i in range(1, c.maxstep + 1): + c.type.append(c.type[0]) + c.step.append('{:0>3}'.format(i)) + c.time.append(c.time[0]) + + self.inputdir = c.inputdir + self.basetime = c.basetime + self.dtime = c.dtime + i = 0 + if fluxes and c.maxstep <= 24: + # no forecast beyond one day is needed! 
+ # Thus, prepare flux data manually as usual + # with only forecast fields with start times at 00/12 + # (but without 00/12 fields since these are + # the initialisation times of the flux fields + # and therefore are zero all the time) + self.types[c.type[1]] = {'times': '00/12', 'steps': + '{}/to/12/by/{}'.format(c.dtime, c.dtime)} + else: + for ty, st, ti in zip(c.type, c.step, c.time): + btlist = range(24) + if c.basetime == '12': + btlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + if c.basetime == '00': + btlist = [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 0] + + if i % int(c.dtime) == 0 and (i in btlist or c.maxstep > 24): + + if ty not in self.types.keys(): + self.types[ty] = {'times': '', 'steps': ''} + + if ti not in self.types[ty]['times']: + if self.types[ty]['times']: + self.types[ty]['times'] += '/' + self.types[ty]['times'] += ti + + if st not in self.types[ty]['steps']: + if self.types[ty]['steps']: + self.types[ty]['steps'] += '/' + self.types[ty]['steps'] += st + i += 1 + + self.marsclass = c.marsclass + self.stream = c.stream + self.number = c.number + self.resol = c.resol + self.accuracy = c.accuracy + self.level = c.level + + if c.levelist: + self.levelist = c.levelist + else: + self.levelist = '1/to/' + c.level + + # for gaussian grid retrieval + self.glevelist = '1/to/' + c.level + + if hasattr(c, 'gaussian') and c.gaussian: + self.gaussian = c.gaussian + else: + self.gaussian = '' + + if hasattr(c, 'expver') and c.expver: + self.expver = c.expver + else: + self.expver = '1' + + if hasattr(c, 'number') and c.number: + self.number = c.number + else: + self.number = '0' + + if 'N' in c.grid: # Gaussian output grid + self.grid = c.grid + self.area = 'G' + else: + self.grid = '{}/{}'.format(int(c.grid) / 1000., int(c.grid) / 1000.) + self.area = '{}/{}/{}/{}'.format(int(c.upper) / 1000., + int(c.left) / 1000., + int(c.lower) / 1000., + int(c.right) / 1000.) 
+ + self.outputfilelist = [] + + + # Now comes the nasty part that deals with the different + # scenarios we have: + # 1) Calculation of etadot on + # a) Gaussian grid + # b) Output grid + # c) Output grid using parameter 77 retrieved from MARS + # 3) Calculation/Retrieval of omega + # 4) Download also data for WRF + + + # Different grids need different retrievals + # SH = Spherical Harmonics, GG = Gaussian Grid, + # OG = Output Grid, ML = MultiLevel, SL = SingleLevel + self.params = {'SH__ML': '', 'SH__SL': '', + 'GG__ML': '', 'GG__SL': '', + 'OG__ML': '', 'OG__SL': '', + 'OG_OROLSM_SL': '', 'OG_acc_SL': ''} + + if fluxes is False: + self.params['SH__SL'] = ['LNSP', 'ML', '1', 'OFF'] + # "SD/MSL/TCC/10U/10V/2T/2D/129/172" + self.params['OG__SL'] = ["141/151/164/165/166/167/168/129/172", \ + 'SFC', '1', self.grid] + if c.addpar: + if c.addpar[0] == '/': + c.addpar = c.addpar[1:] + self.params['OG__SL'][0] += '/' + '/'.join(c.addpar) + + self.params['OG_OROLSM__SL'] = ["160/27/28/173", \ + 'SFC', '1', self.grid] + + self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, self.grid] + + if c.gauss == '0' and c.eta == '1': + # the simplest case + self.params['OG__ML'][0] += '/U/V/77' + elif c.gauss == '0' and c.eta == '0': + # this is not recommended (inaccurate) + self.params['OG__ML'][0] += '/U/V' + elif c.gauss == '1' and c.eta == '0': + # this is needed for data before 2008, or for reanalysis data + self.params['GG__SL'] = ['Q', 'ML', '1', \ + '{}'.format((int(self.resol) + 1) / 2)] + self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] + else: + print('Warning: This is a very costly parameter combination, ' + 'use only for debugging!') + self.params['GG__SL'] = ['Q', 'ML', '1', \ + '{}'.format((int(self.resol) + 1) / 2)] + self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ + '{}'.format((int(self.resol) + 1) / 2)] + + if hasattr(c, 'omega') and c.omega == '1': + self.params['OG__ML'][0] += '/W' + + # add cloud water content if necessary + if hasattr(c, 'cwc') and c.cwc == '1': + self.params['OG__ML'][0] += '/CLWC/CIWC' + + # add vorticity and geopotential height for WRF if necessary + if hasattr(c, 'wrf') and c.wrf == '1': + self.params['OG__ML'][0] += '/Z/VO' + if '/D' not in self.params['OG__ML'][0]: + self.params['OG__ML'][0] += '/D' + #wrf_sfc = 'sp/msl/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/ / + # stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4'.upper() + wrf_sfc = '134/235/167/165/166/168/129/172/34/31/141/ \ + 139/170/183/236/39/40/41/42'.upper() + lwrt_sfc = wrf_sfc.split('/') + for par in lwrt_sfc: + if par not in self.params['OG__SL'][0]: + self.params['OG__SL'][0] += '/' + par + + else: + self.params['OG_acc_SL'] = ["LSP/CP/SSHF/EWSS/NSSS/SSR", \ + 'SFC', '1', self.grid] + + # if needed, add additional WRF specific parameters here + + return + + + def write_namelist(self, c, filename): + ''' + @Description: + Creates a namelist file in the temporary directory and writes + the following values to it: maxl, maxb, mlevel, + mlevelist, mnauf, metapar, rlo0, rlo1, rla0, rla1, + momega, momegadiff, mgauss, msmooth, meta, metadiff, mdpdeta + + @Input: + self: instance of EcFlexpart + The current object of the class. 
+ + c: instance of class ControlFile + Contains all the parameters of CONTROL files, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + filename: string + Name of the namelist file. + + @Return: + <nothing> + ''' + + self.inputdir = c.inputdir + area = np.asarray(self.area.split('/')).astype(float) + grid = np.asarray(self.grid.split('/')).astype(float) + + if area[1] > area[3]: + area[1] -= 360 + maxl = int((area[3] - area[1]) / grid[1]) + 1 + maxb = int((area[0] - area[2]) / grid[0]) + 1 + + with open(self.inputdir + '/' + filename, 'w') as f: + f.write('&NAMGEN\n') + f.write(',\n '.join(['maxl = ' + str(maxl), 'maxb = ' + str(maxb), + 'mlevel = ' + str(self.level), + 'mlevelist = ' + '"' + str(self.levelist) + + '"', + 'mnauf = ' + str(self.resol), + 'metapar = ' + '77', + 'rlo0 = ' + str(area[1]), + 'rlo1 = ' + str(area[3]), + 'rla0 = ' + str(area[2]), + 'rla1 = ' + str(area[0]), + 'momega = ' + str(c.omega), + 'momegadiff = ' + str(c.omegadiff), + 'mgauss = ' + str(c.gauss), + 'msmooth = ' + str(c.smooth), + 'meta = ' + str(c.eta), + 'metadiff = ' + str(c.etadiff), + 'mdpdeta = ' + str(c.dpdeta)])) + + f.write('\n/\n') + + return + + def retrieve(self, server, dates, request, inputdir='.'): + ''' + @Description: + Finalizing the retrieval information by setting final details + depending on grid type. + Prepares MARS retrievals per grid type and submits them. + + @Input: + self: instance of EcFlexpart + The current object of the class. + + server: instance of ECMWFService or ECMWFDataServer + The connection to the ECMWF server. This is different + for member state users which have full access and non + member state users which have only access to the public + data sets. The decision is made from command line argument + "public"; for public access its True (ECMWFDataServer) + for member state users its False (ECMWFService) + + dates: string + Contains start and end date of the retrieval in the format + "YYYYMMDD/to/YYYYMMDD" + + inputdir: string, optional + Path to the directory where the retrieved data is about + to be stored. The default is the current directory ('.'). + + @Return: + <nothing> + ''' + self.dates = dates + self.server = server + self.inputdir = inputdir + oro = False + + for ftype in self.types: + for pk, pv in self.params.iteritems(): + if isinstance(pv, str): + continue + mftype = '' + ftype + mftime = self.types[ftype]['times'] + mfstep = self.types[ftype]['steps'] + mfdate = self.dates + mfstream = self.stream + mftarget = self.inputdir + "/" + ftype + pk + '.' + \ + self.dates.split('/')[0] + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + if pk == 'OG__SL': + pass + if pk == 'OG_OROLSM__SL': + if oro is False: + mfstream = 'OPER' + mftype = 'AN' + mftime = '00' + mfstep = '000' + mfdate = self.dates.split('/')[0] + mftarget = self.inputdir + "/" + pk + '.' + mfdate + \ + '.' + str(os.getppid()) + '.' 
+ \ + str(os.getpid()) + ".grb" + oro = True + else: + continue + if pk == 'GG__SL' and pv[0] == 'Q': + area = "" + gaussian = 'reduced' + else: + area = self.area + gaussian = self.gaussian + + # ------ on demand path -------------------------------------------------- + if self.basetime is None: + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=mfstream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + if request == 0: + MR.display_info() + MR.data_retrieve() + elif request == 1: + MR.print_info() + elif request == 2: + MR.print_info() + MR.display_info() + MR.data_retrieve() + else: + print 'Failure' + # ------ operational path ------------------------------------------------ + else: + # check if mars job requests fields beyond basetime. + # If yes eliminate those fields since they may not + # be accessible with user's credentials + if 'by' in mfstep: + sm1 = 2 + else: + sm1 = -1 + + if 'by' in mftime: + tm1 = 2 + else: + tm1 = -1 + + maxdate = datetime.strptime(mfdate.split('/')[-1] + + mftime.split('/')[tm1], + '%Y%m%d%H') + istep = int(mfstep.split('/')[sm1]) + maxtime = maxdate + timedelta(hours=istep) + + elimit = datetime.strptime(mfdate.split('/')[-1] + + self.basetime, '%Y%m%d%H') + + if self.basetime == '12': + # -------------- flux data ---------------------------- + if 'acc' in pk: + + # Strategy: + # if maxtime-elimit >= 24h reduce date by 1, + # if 12h <= maxtime-elimit<12h reduce time for last date + # if maxtime-elimit<12h reduce step for last time + # A split of the MARS job into 2 is likely necessary. + maxtime = elimit - timedelta(hours=24) + mfdate = '/'.join(['/'.join(mfdate.split('/')[:-1]), + datetime.strftime(maxtime, + '%Y%m%d')]) + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + maxtime = elimit - timedelta(hours=12) + mfdate = datetime.strftime(maxtime, '%Y%m%d') + mftime = '00' + mftarget = self.inputdir + "/" + ftype + pk + \ + '.' + mfdate + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + + MR.data_retrieve() + # -------------- non flux data ------------------------ + else: + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + else: # basetime == 0 ??? 
#AP + + maxtime = elimit - timedelta(hours=24) + mfdate = datetime.strftime(maxtime, '%Y%m%d') + mftimesave = ''.join(mftime) + + if '/' in mftime: + times = mftime.split('/') + while ((int(times[0]) + + int(mfstep.split('/')[0]) <= 12) and + (pk != 'OG_OROLSM__SL') and 'acc' not in pk): + times = times[1:] + if len(times) > 1: + mftime = '/'.join(times) + else: + mftime = times[0] + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + if (int(mftimesave.split('/')[0]) == 0 and + int(mfstep.split('/')[0]) == 0 and + pk != 'OG_OROLSM__SL'): + + mfdate = datetime.strftime(elimit, '%Y%m%d') + mftime = '00' + mfstep = '000' + mftarget = self.inputdir + "/" + ftype + pk + \ + '.' + mfdate + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + if request == 0 or request == 2: + print('MARS retrieve done ... ') + elif request == 1: + print('MARS request printed ...') + + return + + + def process_output(self, c): + ''' + @Description: + The grib files are postprocessed depending on the selection in + CONTROL file. The resulting files are moved to the output + directory if its not equla to the input directory. + The following modifications might be done if + properly switched in CONTROL file: + GRIB2 - Conversion to GRIB2 + ECTRANS - Transfer of files to gateway server + ECSTORAGE - Storage at ECMWF server + GRIB2FLEXPART - Conversion of GRIB files to FLEXPART binary format + + @Input: + self: instance of EcFlexpart + The current object of the class. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + + ''' + + print '\n\nPostprocessing:\n Format: {}\n'.format(c.format) + + if c.ecapi is False: + print('ecstorage: {}\n ecfsdir: {}\n'. 
+ format(c.ecstorage, c.ecfsdir)) + if not hasattr(c, 'gateway'): + c.gateway = os.getenv('GATEWAY') + if not hasattr(c, 'destination'): + c.destination = os.getenv('DESTINATION') + print('ectrans: {}\n gateway: {}\n destination: {}\n ' + .format(c.ectrans, c.gateway, c.destination)) + + print 'Output filelist: \n' + print self.outputfilelist + + if c.format.lower() == 'grib2': + for ofile in self.outputfilelist: + p = subprocess.check_call(['grib_set', '-s', 'edition=2, \ + productDefinitionTemplateNumber=8', + ofile, ofile + '_2']) + p = subprocess.check_call(['mv', ofile + '_2', ofile]) + + if int(c.ectrans) == 1 and c.ecapi is False: + for ofile in self.outputfilelist: + p = subprocess.check_call(['ectrans', '-overwrite', '-gateway', + c.gateway, '-remote', c.destination, + '-source', ofile]) + #print('ectrans:', p) + + if int(c.ecstorage) == 1 and c.ecapi is False: + for ofile in self.outputfilelist: + p = subprocess.check_call(['ecp', '-o', ofile, + os.path.expandvars(c.ecfsdir)]) + + if c.outputdir != c.inputdir: + for ofile in self.outputfilelist: + p = subprocess.check_call(['mv', ofile, c.outputdir]) + + # prepare environment for the grib2flexpart run + # to convert grib to flexpart binary + if c.grib2flexpart == '1': + + # generate AVAILABLE file + # Example of AVAILABLE file data: + # 20131107 000000 EN13110700 ON DISC + clist = [] + for ofile in self.outputfilelist: + fname = ofile.split('/') + if '.' in fname[-1]: + l = fname[-1].split('.') + timestamp = datetime.strptime(l[0][-6:] + l[1], + '%y%m%d%H') + timestamp += timedelta(hours=int(l[2])) + cdate = datetime.strftime(timestamp, '%Y%m%d') + chms = datetime.strftime(timestamp, '%H%M%S') + else: + cdate = '20' + fname[-1][-8:-2] + chms = fname[-1][-2:] + '0000' + clist.append(cdate + ' ' + chms + ' '*6 + + fname[-1] + ' '*14 + 'ON DISC') + clist.sort() + with open(c.outputdir + '/' + 'AVAILABLE', 'w') as f: + f.write('\n'.join(clist) + '\n') + + # generate pathnames file + pwd = os.path.abspath(c.outputdir) + with open(pwd + '/pathnames', 'w') as f: + f.write(pwd + '/Options/\n') + f.write(pwd + '/\n') + f.write(pwd + '/\n') + f.write(pwd + '/AVAILABLE\n') + f.write(' = == = == = == = == = == == = \n') + + # create Options dir if necessary + if not os.path.exists(pwd + '/Options'): + os.makedirs(pwd+'/Options') + + # read template COMMAND file + with open(os.path.expandvars(os.path.expanduser( + c.flexpart_root_scripts)) + '/../Options/COMMAND', 'r') as f: + lflist = f.read().split('\n') + + # find index of list where to put in the + # date and time information + # usually after the LDIRECT parameter + i = 0 + for l in lflist: + if 'LDIRECT' in l.upper(): + break + i += 1 + + # insert the date and time information of run start and end + # into the list of lines of COMMAND file + lflist = lflist[:i+1] + \ + [clist[0][:16], clist[-1][:16]] + \ + lflist[i+3:] + + # write the new COMMAND file + with open(pwd + '/Options/COMMAND', 'w') as g: + g.write('\n'.join(lflist) + '\n') + + # change to outputdir and start the grib2flexpart run + # afterwards switch back to the working dir + os.chdir(c.outputdir) + p = subprocess.check_call([ + os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts)) + + '/../FLEXPART_PROGRAM/grib2flexpart', 'useAvailable', '.']) + os.chdir(pwd) + + return + + def create(self, inputfiles, c): + ''' + @Description: + This method is based on the ECMWF example index.py + https://software.ecmwf.int/wiki/display/GRIB/index.py + + An index file will be created which depends on the combination + of "date", 
"time" and "stepRange" values. This is used to iterate + over all messages in each grib file which were passed through the + parameter "inputfiles" to seperate specific parameters into fort.* + files. Afterwards the FORTRAN program Convert2 is called to convert + the data fields all to the same grid and put them in one file + per unique time step (combination of "date", "time" and + "stepRange"). + + @Input: + self: instance of EcFlexpart + The current object of the class. + + inputfiles: instance of UioFiles + Contains a list of files. + + c: instance of class ControlFile + Contains all the parameters of CONTROL files, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + table128 = init128(_config.PATH_GRIBTABLE) + wrfpars = to_param_id('sp/mslp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/\ + stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4', + table128) + + index_keys = ["date", "time", "step"] + indexfile = c.inputdir + "/date_time_stepRange.idx" + silent_remove(indexfile) + grib = GribTools(inputfiles.files) + # creates new index file + iid = grib.index(index_keys=index_keys, index_file=indexfile) + + # read values of index keys + index_vals = [] + for key in index_keys: + index_vals.append(grib_index_get(iid, key)) + print index_vals[-1] + # index_vals looks for example like: + # index_vals[0]: ('20171106', '20171107', '20171108') ; date + # index_vals[1]: ('0', '1200', '1800', '600') ; time + # index_vals[2]: ('0', '12', '3', '6', '9') ; stepRange + + fdict = {'10':None, '11':None, '12':None, '13':None, '16':None, + '17':None, '19':None, '21':None, '22':None, '20':None} + + for prod in product(*index_vals): + # flag for Fortran program CONVERT2 and file merging + convertFlag = False + print 'current prod: ', prod + # e.g. prod = ('20170505', '0', '12') + # ( date ,time, step) + # per date e.g. time = 0, 600, 1200, 1800 + # per time e.g. step = 0, 3, 6, 9, 12 + for i in range(len(index_keys)): + grib_index_select(iid, index_keys[i], prod[i]) + + # get first id from current product + gid = grib_new_from_index(iid) + + # if there is data for this product combination + # prepare some date and time parameter before reading the data + if gid is not None: + # Combine all temporary data files into final grib file if + # gid is at least one time not None. Therefore set convertFlag + # to save information. The fortran program CONVERT2 is also + # only done if convertFlag is True + convertFlag = True + # remove old fort.* files and open new ones + # they are just valid for a single product + for k, f in fdict.iteritems(): + silent_remove(c.inputdir + "/fort." + k) + fdict[k] = open(c.inputdir + '/fort.' 
+ k, 'w') + + cdate = str(grib_get(gid, 'date')) + time = grib_get(gid, 'time') + step = grib_get(gid, 'step') + # create correct timestamp from the three time informations + # date, time, step + timestamp = datetime.strptime(cdate + '{:0>2}'.format(time/100), + '%Y%m%d%H') + timestamp += timedelta(hours=int(step)) + cdateH = datetime.strftime(timestamp, '%Y%m%d%H') + + if c.basetime is not None: + slimit = datetime.strptime(c.start_date + '00', '%Y%m%d%H') + bt = '23' + if c.basetime == '00': + bt = '00' + slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\ + - timedelta(hours=12-int(c.dtime)) + if c.basetime == '12': + bt = '12' + slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\ + - timedelta(hours=12-int(c.dtime)) + + elimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H') + + if timestamp < slimit or timestamp > elimit: + continue + + try: + if c.wrf == '1': + if 'olddate' not in locals(): + fwrf = open(c.outputdir + '/WRF' + cdate + + '.{:0>2}'.format(time) + '.000.grb2', 'w') + olddate = cdate[:] + else: + if cdate != olddate: + fwrf = open(c.outputdir + '/WRF' + cdate + + '.{:0>2}'.format(time) + '.000.grb2', + 'w') + olddate = cdate[:] + except AttributeError: + pass + + # helper variable to remember which fields are already used. + savedfields = [] + while 1: + if gid is None: + break + paramId = grib_get(gid, 'paramId') + gridtype = grib_get(gid, 'gridType') + levtype = grib_get(gid, 'typeOfLevel') + if paramId == 133 and gridtype == 'reduced_gg': + # Specific humidity (Q.grb) is used as a template only + # so we need the first we "meet" + with open(c.inputdir + '/fort.18', 'w') as fout: + grib_write(gid, fout) + elif paramId == 131 or paramId == 132: + grib_write(gid, fdict['10']) + elif paramId == 130: + grib_write(gid, fdict['11']) + elif paramId == 133 and gridtype != 'reduced_gg': + grib_write(gid, fdict['17']) + elif paramId == 152: + grib_write(gid, fdict['12']) + elif paramId == 155 and gridtype == 'sh': + grib_write(gid, fdict['13']) + elif paramId in [129, 138, 155] and levtype == 'hybrid' \ + and c.wrf == '1': + pass + elif paramId == 246 or paramId == 247: + # cloud liquid water and ice + if paramId == 246: + clwc = grib_get_values(gid) + else: + clwc += grib_get_values(gid) + grib_set_values(gid, clwc) + grib_set(gid, 'paramId', 201031) + grib_write(gid, fdict['22']) + elif paramId == 135: + grib_write(gid, fdict['19']) + elif paramId == 77: + grib_write(gid, fdict['21']) + else: + if paramId not in savedfields: + grib_write(gid, fdict['16']) + savedfields.append(paramId) + else: + print 'duplicate ' + str(paramId) + ' not written' + + try: + if c.wrf == '1': + if levtype == 'hybrid': # model layer + if paramId in [129, 130, 131, 132, 133, 138, 155]: + grib_write(gid, fwrf) + else: # sfc layer + if paramId in wrfpars: + grib_write(gid, fwrf) + except AttributeError: + pass + + grib_release(gid) + gid = grib_new_from_index(iid) + + for f in fdict.values(): + f.close() + + # call for CONVERT2 if flag is True + if convertFlag: + pwd = os.getcwd() + os.chdir(c.inputdir) + if os.stat('fort.21').st_size == 0 and int(c.eta) == 1: + print 'Parameter 77 (etadot) is missing, most likely it is \ + not available for this type or date/time\n' + print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n' + my_error(c.mailfail, 'fort.21 is empty while parameter eta \ + is set to 1 in CONTROL file') + + # create the corresponding output file fort.15 + # (generated by CONVERT2) + fort.16 (paramId 167 and 168) + p = subprocess.check_call( + 
[os.path.expandvars(os.path.expanduser(c.exedir)) + + '/CONVERT2'], shell=True) + os.chdir(pwd) + + # create final output filename, e.g. EN13040500 (ENYYMMDDHH) + fnout = c.inputdir + '/' + c.prefix + if c.maxstep > 12: + suffix = cdate[2:8] + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step) + else: + suffix = cdateH[2:10] + fnout += suffix + print "outputfile = " + fnout + self.outputfilelist.append(fnout) # needed for final processing + + # create outputfile and copy all data from intermediate files + # to the outputfile (final GRIB files) + orolsm = os.path.basename(glob.glob( + c.inputdir + '/OG_OROLSM__SL.*.' + c.ppid + '*')[0]) + fluxfile = 'flux' + cdate[0:2] + suffix + if c.cwc != '1': + flist = ['fort.15', fluxfile, 'fort.16', orolsm] + else: + flist = ['fort.15', 'fort.22', fluxfile, 'fort.16', orolsm] + + with open(fnout, 'wb') as fout: + for f in flist: + shutil.copyfileobj( + open(c.inputdir + '/' + f, 'rb'), fout) + + if c.omega == '1': + with open(c.outputdir + '/OMEGA', 'wb') as fout: + shutil.copyfileobj( + open(c.inputdir + '/fort.25', 'rb'), fout) + + if hasattr(c, 'wrf') and c.wrf == '1': + fwrf.close() + + grib_index_release(iid) + + return + + def deacc_fluxes(self, inputfiles, c): + ''' + @Description: + Goes through all flux fields in ordered time and de-accumulate + the fields. Afterwards the fields are disaggregated in time. + Different versions of disaggregation is provided for rainfall + data (darain, modified linear) and the surface fluxes and + stress data (dapoly, cubic polynomial). + + @Input: + self: instance of EcFlexpart + The current object of the class. + + inputfiles: instance of UioFiles + Contains a list of files. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + table128 = init128(_config.PATH_GRIBTABLE) + pars = to_param_id(self.params['OG_acc_SL'][0], table128) + index_keys = ["date", "time", "step"] + indexfile = c.inputdir + "/date_time_stepRange.idx" + silent_remove(indexfile) + grib = GribTools(inputfiles.files) + # creates new index file + iid = grib.index(index_keys=index_keys, index_file=indexfile) + + # read values of index keys + index_vals = [] + for key in index_keys: + key_vals = grib_index_get(iid, key) + print key_vals + # have to sort the steps for disaggregation, + # therefore convert to int first + if key == 'step': + key_vals = [int(k) for k in key_vals] + key_vals.sort() + key_vals = [str(k) for k in key_vals] + index_vals.append(key_vals) + # index_vals looks for example like: + # index_vals[0]: ('20171106', '20171107', '20171108') ; date + # index_vals[1]: ('0', '1200') ; time + # index_vals[2]: (3', '6', '9', '12') ; stepRange + + valsdict = {} + svalsdict = {} + stepsdict = {} + for p in pars: + valsdict[str(p)] = [] + svalsdict[str(p)] = [] + stepsdict[str(p)] = [] + + print 'maxstep: ', c.maxstep + + for prod in product(*index_vals): + # e.g. prod = ('20170505', '0', '12') + # ( date ,time, step) + # per date e.g. 
time = 0, 1200 + # per time e.g. step = 3, 6, 9, 12 + for i in range(len(index_keys)): + grib_index_select(iid, index_keys[i], prod[i]) + + gid = grib_new_from_index(iid) + if gid is not None: + cdate = grib_get(gid, 'date') + time = grib_get(gid, 'time') + step = grib_get(gid, 'step') + # date+time+step-2*dtime + # (since interpolated value valid for step-2*dtime) + sdate = datetime(year=cdate/10000, + month=(cdate % 10000)/100, + day=(cdate % 100), + hour=time/100) + fdate = sdate + timedelta(hours=step-2*int(c.dtime)) + sdates = sdate + timedelta(hours=step) + elimit = None + else: + break + + if c.maxstep > 12: + fnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step-2*int(c.dtime)) + gnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step-int(c.dtime)) + hnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step) + g = open(gnout, 'w') + h = open(hnout, 'w') + else: + fnout = c.inputdir + '/flux' + fdate.strftime('%Y%m%d%H') + gnout = c.inputdir + '/flux' + (fdate + + timedelta(hours=int(c.dtime)) + ).strftime('%Y%m%d%H') + hnout = c.inputdir + '/flux' + sdates.strftime('%Y%m%d%H') + g = open(gnout, 'w') + h = open(hnout, 'w') + + print "outputfile = " + fnout + f = open(fnout, 'w') + + # read message for message and store relevant data fields + # data keywords are stored in pars + while 1: + if gid is None: + break + cparamId = str(grib_get(gid, 'paramId')) + step = grib_get(gid, 'step') + atime = grib_get(gid, 'time') + ni = grib_get(gid, 'Ni') + nj = grib_get(gid, 'Nj') + if cparamId in valsdict.keys(): + values = grib_get_values(gid) + vdp = valsdict[cparamId] + svdp = svalsdict[cparamId] + sd = stepsdict[cparamId] + + if cparamId == '142' or cparamId == '143': + fak = 1. / 1000. + else: + fak = 3600. 
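+                    # note: the division by fak below presumably converts the
+                    # accumulated fields to the units expected later on:
+                    # fak = 1/1000 turns precipitation (142/143) from m into
+                    # mm, while fak = 3600 converts hours to seconds so that,
+                    # together with the later division by dtime, accumulated
+                    # J/m**2 become mean W/m**2.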
+ + values = (np.reshape(values, (nj, ni))).flatten() / fak + vdp.append(values[:]) # save the accumulated values + if step <= int(c.dtime): + svdp.append(values[:] / int(c.dtime)) + else: # deaccumulate values + svdp.append((vdp[-1] - vdp[-2]) / int(c.dtime)) + + print(cparamId, atime, step, len(values), + values[0], np.std(values)) + # save the 1/3-hourly or specific values + # svdp.append(values[:]) + sd.append(step) + # len(svdp) correspond to the time + if len(svdp) >= 3: + if len(svdp) > 3: + if cparamId == '142' or cparamId == '143': + values = disaggregation.darain(svdp) + else: + values = disaggregation.dapoly(svdp) + + if not (step == c.maxstep and c.maxstep > 12 \ + or sdates == elimit): + vdp.pop(0) + svdp.pop(0) + else: + if c.maxstep > 12: + values = svdp[1] + else: + values = svdp[0] + + grib_set_values(gid, values) + if c.maxstep > 12: + grib_set(gid, 'step', max(0, step-2*int(c.dtime))) + else: + grib_set(gid, 'step', 0) + grib_set(gid, 'time', fdate.hour*100) + grib_set(gid, 'date', fdate.year*10000 + + fdate.month*100+fdate.day) + grib_write(gid, f) + + if c.basetime is not None: + elimit = datetime.strptime(c.end_date + + c.basetime, '%Y%m%d%H') + else: + elimit = sdate + timedelta(2*int(c.dtime)) + + # squeeze out information of last two steps contained + # in svdp + # if step+int(c.dtime) == c.maxstep and c.maxstep>12 + # or sdates+timedelta(hours = int(c.dtime)) + # >= elimit: + # Note that svdp[0] has not been popped in this case + + if step == c.maxstep and c.maxstep > 12 or \ + sdates == elimit: + + values = svdp[3] + grib_set_values(gid, values) + grib_set(gid, 'step', 0) + truedatetime = fdate + timedelta(hours= + 2*int(c.dtime)) + grib_set(gid, 'time', truedatetime.hour * 100) + grib_set(gid, 'date', truedatetime.year * 10000 + + truedatetime.month * 100 + + truedatetime.day) + grib_write(gid, h) + + #values = (svdp[1]+svdp[2])/2. + if cparamId == '142' or cparamId == '143': + values = disaggregation.darain(list(reversed(svdp))) + else: + values = disaggregation.dapoly(list(reversed(svdp))) + + grib_set(gid, 'step', 0) + truedatetime = fdate + timedelta(hours=int(c.dtime)) + grib_set(gid, 'time', truedatetime.hour * 100) + grib_set(gid, 'date', truedatetime.year * 10000 + + truedatetime.month * 100 + + truedatetime.day) + grib_set_values(gid, values) + grib_write(gid, g) + + grib_release(gid) + + gid = grib_new_from_index(iid) + + f.close() + g.close() + h.close() + + grib_index_release(iid) + + return diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/GribTools.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/GribTools.py new file mode 100644 index 0000000000000000000000000000000000000000..a68d1a5485f1b7e3039081ec7fb176d90466ab06 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/GribTools.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: July 2014 +# +# @Change History: +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - changed some naming +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# The GRIB API provides all necessary tools to work directly with the +# grib files. 
Nevertheless, the GRIB API tools are very basic and are in +# direct connection with the grib files. This class provides some higher +# functions which apply a set of GRIB API tools together in the respective +# context. So, the class initially contains a list of grib files (their +# names) and the using program then applies the methods directly on the +# class objects without having to think about how the actual GRIB API +# tools have to be arranged. +# +# @Class Content: +# - __init__ +# - get_keys +# - set_keys +# - copy +# - index +# +# @Class Attributes: +# - filenames +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +from gribapi import grib_new_from_file, grib_is_defined, grib_get, \ + grib_release, grib_set, grib_write, grib_index_read, \ + grib_index_new_from_file, grib_index_add_file, \ + grib_index_write + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class GribTools(object): + ''' + Class for GRIB utilities (new methods) based on GRIB API + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, filenames): + ''' + @Description: + Initialise an object of GribTools and assign a list + of filenames. + + @Input: + filenames: list of strings + A list of filenames. + + @Return: + <nothing> + ''' + + self.filenames = filenames + + return + + + def get_keys(self, keynames, wherekeynames=[], wherekeyvalues=[]): + ''' + @Description: + get keyvalues for a given list of keynames + a where statement can be given (list of key and list of values) + + @Input: + keynames: list of strings + List of keynames. + + wherekeynames: list of strings, optional + Default value is an empty list. + + wherekeyvalues: list of strings, optional + Default value is an empty list. + + @Return: + return_list: list of strings + List of keyvalues for given keynames. + ''' + + fileid = open(self.filenames, 'r') + + return_list = [] + + while 1: + gid_in = grib_new_from_file(fileid) + + if gid_in is None: + break + + if len(wherekeynames) != len(wherekeyvalues): + raise Exception("Number of key values and key names must be \ + the same. Give a value for each keyname!") + + select = True + i = 0 + for wherekey in wherekeynames: + if not grib_is_defined(gid_in, wherekey): + raise Exception("where key was not defined") + + select = (select and (str(wherekeyvalues[i]) == + str(grib_get(gid_in, wherekey)))) + i += 1 + + if select: + llist = [] + for key in keynames: + llist.extend([str(grib_get(gid_in, key))]) + return_list.append(llist) + + grib_release(gid_in) + + fileid.close() + + return return_list + + + def set_keys(self, fromfile, keynames, keyvalues, wherekeynames=[], + wherekeyvalues=[], strict=False, filemode='w'): + ''' + @Description: + Opens the file to read the grib messages and then write + them to a new output file. By default all messages are + written out. Also, the keyvalues of the passed list of + keynames are set or only those meeting the where statement. + (list of key and list of values). + + @Input: + fromfile: string + Filename of the input file to read the grib messages from. 
+ + keynames: list of strings + List of keynames. Default is an empty list. + + keyvalues: list of strings + List of keynames. Default is an empty list. + + wherekeynames: list of strings, optional + Default value is an empty list. + + wherekeyvalues: list of strings, optional + Default value is an empty list. + + strict: boolean, optional + Decides if everything from keynames and keyvalues + is written out the grib file (False) or only those + meeting the where statement (True). Default is False. + + filemode: string, optional + Sets the mode for the output file. Default is "w". + + @Return: + <nothing> + + ''' + fout = open(self.filenames, filemode) + fin = open(fromfile) + + while 1: + gid_in = grib_new_from_file(fin) + + if gid_in is None: + break + + if len(wherekeynames) != len(wherekeyvalues): + raise Exception("Give a value for each keyname!") + + select = True + i = 0 + for wherekey in wherekeynames: + if not grib_is_defined(gid_in, wherekey): + raise Exception("where Key was not defined") + + select = (select and (str(wherekeyvalues[i]) == + str(grib_get(gid_in, wherekey)))) + i += 1 + + if select: + i = 0 + for key in keynames: + grib_set(gid_in, key, keyvalues[i]) + i += 1 + + grib_write(gid_in, fout) + + grib_release(gid_in) + + fin.close() + fout.close() + + return + + def copy(self, filename_in, selectWhere=True, + keynames=[], keyvalues=[], filemode='w'): + ''' + Add the content of another input grib file to the objects file but + only messages corresponding to keys/values passed to the function. + The selectWhere switch decides if to copy the keys equal to (True) or + different to (False) the keynames/keyvalues list passed to the function. + + @Input: + filename_in: string + Filename of the input file to read the grib messages from. + + selectWhere: boolean, optional + Decides if to copy the keynames and values equal to (True) or + different to (False) the keynames/keyvalues list passed to the + function. Default is True. + + keynames: list of strings, optional + List of keynames. Default is an empty list. + + keyvalues: list of strings, optional + List of keynames. Default is an empty list. + + filemode: string, optional + Sets the mode for the output file. Default is "w". + + @Return: + <nothing> + ''' + + fin = open(filename_in) + fout = open(self.filenames, filemode) + + while 1: + gid_in = grib_new_from_file(fin) + + if gid_in is None: + break + + if len(keynames) != len(keyvalues): + raise Exception("Give a value for each keyname!") + + select = True + i = 0 + for key in keynames: + if not grib_is_defined(gid_in, key): + raise Exception("Key was not defined") + + if selectWhere: + select = (select and (str(keyvalues[i]) == + str(grib_get(gid_in, key)))) + else: + select = (select and (str(keyvalues[i]) != + str(grib_get(gid_in, key)))) + i += 1 + + if select: + grib_write(gid_in, fout) + + grib_release(gid_in) + + fin.close() + fout.close() + + return + + def index(self, index_keys=["mars"], index_file="my.idx"): + ''' + @Description: + Create index file from a list of files if it does not exist or + read an index file. + + @Input: + index_keys: list of strings, optional + Contains the list of key parameter names from + which the index is to be created. + Default is a list with a single entry string "mars". + + index_file: string, optional + Filename where the indices are stored. + Default is "my.idx". + + @Return: + iid: integer + Grib index id. + ''' + print "... 
index will be done" + iid = None + + if os.path.exists(index_file): + iid = grib_index_read(index_file) + print "Use existing index file: %s " % (index_file) + else: + for filename in self.filenames: + print "Inputfile: %s " % (filename) + if iid is None: + iid = grib_index_new_from_file(filename, index_keys) + else: + grib_index_add_file(iid, filename) + + if iid is not None: + grib_index_write(iid, index_file) + + print '... index done' + + return iid diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/MarsRetrieval.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/MarsRetrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..fa4012c2e88578d0d2635dba2a6a7530ca67498c --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/MarsRetrieval.py @@ -0,0 +1,419 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - optimized display_info +# - optimized data_retrieve and seperate between python and shell +# script call +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - applied some minor modifications in programming style/structure +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# A MARS revtrieval has a specific syntax with a selection of keywords and +# their corresponding values. This class provides the necessary functions +# by displaying the selected parameters and their values and the actual +# retrievement of the data through a mars request or a Python web api +# interface. The initialization already expects all the keyword values. +# +# @Class Content: +# - __init__ +# - display_info +# - data_retrieve +# +# @Class Attributes: +# - server +# - marsclass +# - dtype +# - levtype +# - levelist +# - repres +# - date +# - resol +# - stream +# - area +# - time +# - step +# - expver +# - number +# - accuracy +# - grid +# - gaussian +# - target +# - param +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import subprocess +import os + +import _config +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class MarsRetrieval(object): + ''' + Class for submitting MARS retrievals. + + A description of MARS keywords/arguments and examples of their + values can be found here: + https://software.ecmwf.int/wiki/display/UDOC/\ + Identification+keywords#Identificationkeywords-class + + ''' + + def __init__(self, server, marsclass="ei", type="", levtype="", + levelist="", repres="", date="", resol="", stream="", + area="", time="", step="", expver="1", number="", + accuracy="", grid="", gaussian="", target="", + param=""): + ''' + @Description: + Initialises the instance of the MarsRetrieval class and + defines and assigns a set of the necessary retrieval parameters + for the FLEXPART input data. 
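+
+            A purely illustrative (hypothetical) set-up could look like
+            this; the keyword values are examples only and are not taken
+            from a real CONTROL file:
+
+                server = ecmwfapi.ECMWFService("mars")  # or False for
+                                                        # shell mars calls
+                mr = MarsRetrieval(server, marsclass='ei', type='an',
+                                   levtype='ml', levelist='1/to/60',
+                                   date='20160809', time='00/06/12/18',
+                                   step='00', grid='1.0/1.0',
+                                   area='70./-10./30./30.',
+                                   param='130/131/132',
+                                   target='mydata.grb')
+                mr.display_info()
+                mr.data_retrieve()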
+ A description of MARS keywords/arguments, their dependencies + on each other and examples of their values can be found here: + + https://software.ecmwf.int/wiki/display/UDOC/MARS+keywords + + @Input: + self: instance of MarsRetrieval + For description see class documentation. + + server: instance of ECMWFService (from ECMWF Web-API) + This is the connection to the ECMWF data servers. + It is needed for the pythonic access of ECMWF data. + + marsclass: string, optional + Characterisation of dataset. E.g. EI (ERA-Interim), + E4 (ERA40), OD (Operational archive), ea (ERA5). + Default is the ERA-Interim dataset "ei". + + type: string, optional + Determines the type of fields to be retrieved. + Selects between observations, images or fields. + Examples for fields: Analysis (an), Forecast (fc), + Perturbed Forecast (pf), Control Forecast (cf) and so on. + Default is an empty string. + + levtype: string, optional + Denotes type of level. Has a direct implication on valid + levelist values! + E.g. model level (ml), pressure level (pl), surface (sfc), + potential vorticity (pv), potential temperature (pt) + and depth (dp). + Default is an empty string. + + levelist: string, optional + Specifies the required levels. It has to have a valid + correspondence to the selected levtype. + Examples: model level: 1/to/137, pressure levels: 500/to/1000 + Default is an empty string. + + repres: string, optional + Selects the representation of the archived data. + E.g. sh - spherical harmonics, gg - Gaussian grid, + ll - latitude/longitude, ... + Default is an empty string. + + date: string, optional + Specifies the Analysis date, the Forecast base date or + Observations date. Valid formats are: + Absolute as YYYY-MM-DD or YYYYMMDD. + Default is an empty string. + + resol: string, optional + Specifies the desired triangular truncation of retrieved data, + before carrying out any other selected post-processing. + The default is automatic truncation (auto), by which the lowest + resolution compatible with the value specified in grid is + automatically selected for the retrieval. + Users wanting to perform post-processing from full spectral + resolution should specify Archived Value (av). + The following are examples of existing resolutions found in + the archive: 63, 106, 159, 213, 255, 319, 399, 511, 799 or 1279. + This keyword has no meaning/effect if the archived data is + not in spherical harmonics representation. + The best selection can be found here: + https://software.ecmwf.int/wiki/display/UDOC/\ + Retrieve#Retrieve-Truncationbeforeinterpolation + Default is an empty string. + + stream: string, optional + Identifies the forecasting system used to generate the data. + E.g. oper (Atmospheric model), enfo (Ensemble forecats), ... + Default is an empty string. + + area: string, optional + Specifies the desired sub-area of data to be extracted. + Areas can be defined to wrap around the globe. + + Latitude values must be given as signed numbers, with: + north latitudes (i.e. north of the equator) + being positive (e.g: 40.5) + south latitutes (i.e. south of the equator) + being negative (e.g: -50.5) + Longtitude values must be given as signed numbers, with: + east longitudes (i.e. east of the 0 degree meridian) + being positive (e.g: 35.0) + west longitudes (i.e. west of the 0 degree meridian) + being negative (e.g: -20.5) + + E.g.: North/West/South/East + Default is an empty string. + + time: string, optional + Specifies the time of the data in hours and minutes. 
+ Valid values depend on the type of data: Analysis time, + Forecast base time or First guess verification time + (all usually at synoptic hours: 00, 06, 12 and 18 ). + Observation time (any combination in hours and minutes is valid, + subject to data availability in the archive). + The syntax is HHMM or HH:MM. If MM is omitted it defaults to 00. + Default is an empty string. + + step: string, optional + Specifies the forecast time step from forecast base time. + Valid values are hours (HH) from forecast base time. It also + specifies the length of the forecast which verifies at + First Guess time. + E.g. 1/3/6-hourly + Default is an empty string. + + expver: string, optional + The version of the dataset. Each experiment is assigned a + unique code (version). Production data is assigned 1 or 2, + and experimental data in Operations 11, 12 ,... + Research or Member State's experiments have a four letter + experiment identifier. + Default is "1". + + number: string, optional + Selects the member in ensemble forecast run. (Only then it + is necessary.) It has a different meaning depending on + the type of data. + E.g. Perturbed Forecasts: specifies the Ensemble forecast member + Default is an empty string. + + accuracy: string, optional + Specifies the number of bits per value to be used in the + generated GRIB coded fields. + A positive integer may be given to specify the preferred number + of bits per packed value. This must not be greater than the + number of bits normally used for a Fortran integer on the + processor handling the request (typically 32 or 64 bit). + Within a compute request the accuracy of the original fields + can be passed to the result field by specifying accuracy=av. + Default is an empty string. + + grid: string, optional + Specifies the output grid which can be either a Gaussian grid + or a Latitude/Longitude grid. MARS requests specifying + grid=av will return the archived model grid. + + Lat/Lon grid: The grid spacing needs to be an integer + fraction of 90 degrees e.g. grid = 0.5/0.5 + + Gaussian grid: specified by a letter denoting the type of + Gaussian grid followed by an integer (the grid number) + representing the number of lines between the Pole and Equator, + e.g. + grid = F160 - full (or regular) Gaussian grid with + 160 latitude lines between the pole and equator + grid = N320 - ECMWF original reduced Gaussian grid with + 320 latitude lines between the pole and equator, + see Reduced Gaussian Grids for grid numbers used at ECMWF + grid = O640 - ECMWF octahedral (reduced) Gaussian grid with + 640 latitude lines between the pole and equator + Default is an empty string. + + gaussian: string, optional + This parameter is deprecated and should no longer be used. + Specifies the desired type of Gaussian grid for the output. + Valid Gaussian grids are quasi-regular (reduced) or regular. + Keyword gaussian can only be specified together with + keyword grid. Gaussian without grid has no effect. + Default is an empty string. + + target: string, optional + Specifies a file into which data is to be written after + retrieval or manipulation. Path names should always be + enclosed in double quotes. The MARS client supports automatic + generation of multiple target files using MARS keywords + enclosed in square brackets [ ]. If the environment variable + MARS_MULTITARGET_STRICT_FORMAT is set to 1 before calling mars, + the keyword values will be used in the filename as shown by + the ecCodes GRIB tool grib_ls -m, e.g. 
with + MARS_MULTITARGET_STRICT_FORMAT set to 1 the keywords time, + expver and param will be formatted as 0600, 0001 and 129.128 + rather than 600, 1 and 129. + Default is an empty string. + + param: string, optional + Specifies the meteorological parameter. + The list of meteorological parameters in MARS is extensive. + Their availability is directly related to their meteorological + meaning and, therefore, the rest of directives specified + in the MARS request. + Meteorological parameters can be specified by their + GRIB code (param=130), their mnemonic (param=t) or + full name (param=temperature). + The list of parameter should be seperated by a "/"-sign. + E.g. 130/131/133 + Default is an empty string. + + @Return: + <nothing> + ''' + + self.server = server + self.marsclass = marsclass + self.type = type + self.levtype = levtype + self.levelist = levelist + self.repres = repres + self.date = date + self.resol = resol + self.stream = stream + self.area = area + self.time = time + self.step = step + self.expver = expver + self.number = number + self.accuracy = accuracy + self.grid = grid + self.gaussian = gaussian + self.target = target + self.param = param + + return + + + def display_info(self): + ''' + @Description: + Prints all class attributes and their values. + + @Input: + self: instance of MarsRetrieval + For description see class documentation. + + @Return: + <nothing> + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # iterate through all attributes and print them + # with their corresponding values + for item in attrs.items(): + if item[0] in 'server': + pass + else: + print item[0] + ': ' + str(item[1]) + + return + + + def print_info(self): + ''' + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # open a file to store all requests to + with open(os.path.join(_config.PATH_RUN_DIR + os.path.sep + + _config.FILE_MARS_REQUESTS), 'a') as f: + f.write('mars\n') + # iterate through all attributes and print them + # with their corresponding values + for item in attrs.items(): + if item[0] in 'server': + pass + else: + f.write(item[0] + ': ' + str(item[1]) + '\n') + f.write('\n\n') + + return + + def data_retrieve(self): + ''' + @Description: + Submits a MARS retrieval. Depending on the existence of + ECMWF Web-API it is submitted via Python or a + subprocess in the Shell. The parameter for the mars retrieval + are taken from the defined class attributes. + + @Input: + self: instance of MarsRetrieval + For description see class documentation. 
+ + @Return: + <nothing> + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # convert the dictionary of attributes into a comma + # seperated list of attributes with their values + # needed for the retrieval call + s = 'ret' + for k, v in attrs.iteritems(): + if k in 'server': + continue + if k == 'marsclass': + k = 'class' + if v == '': + continue + if k.lower() == 'target': + target = v + else: + s = s + ',' + k + '=' + str(v) + + # MARS request via Python script + if self.server is not False: + try: + self.server.execute(s, target) + except: + print('MARS Request failed, ' + 'have you already registered at apps.ecmwf.int?') + raise IOError + if os.stat(target).st_size == 0: + print('MARS Request returned no data - please check request') + raise IOError + # MARS request via extra process in shell + else: + s += ',target = "' + target + '"' + p = subprocess.Popen(['mars'], stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, bufsize=1) + pout = p.communicate(input=s)[0] + print pout.decode() + + if 'Some errors reported' in pout.decode(): + print('MARS Request failed - please check request') + raise IOError + + if os.stat(target).st_size == 0: + print('MARS Request returned no data - please check request') + raise IOError + + return diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/UioFiles.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/UioFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..fe6995320308bfb88805745ac5753ffbdc9dd799 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/UioFiles.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - modified method list_files to work with glob instead of listdir +# - added pattern search in method list_files +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - optimisation of method list_files since it didn't work correctly +# for sub directories +# - additional speed up of method list_files +# - modified the class so that it is initiated with a pattern instead +# of suffixes. Gives more precision in selection of files. +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Decription: +# The class is for file manipulation. It is initiated with a regular +# expression pattern for this instance and can produce a list of Files +# from the given file pattern. These files can be deleted. 
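+#
+# A hypothetical usage sketch (path and pattern are examples only):
+#
+#     gribfiles = UioFiles('/path/to/workdir', '*.grb')
+#     print gribfiles           # prints a comma separated list of files
+#     gribfiles.delete_files()  # removes all matched files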
+# +# @Class Content: +# - __init__ +# - __str__ +# - __list_files__ +# - delete_files +# +# @Class Attributes: +# - pattern +# - files +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import fnmatch + +# software specific module from flex_extract +#import profiling +from tools import silent_remove, get_list_as_string + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ + +class UioFiles(object): + ''' + Class to manipulate files. At initialisation it has the attribute + pattern which stores a regular expression pattern for the files associated + with the instance of the class. + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, path, pattern): + ''' + @Description: + Assignes a specific pattern for these files. + + @Input: + self: instance of UioFiles + Description see class documentation. + + path: string + Directory where to list the files. + + pattern: string + Regular expression pattern. For example: '*.grb' + + @Return: + <nothing> + ''' + + self.path = path + self.pattern = pattern + self.files = None + + self.__list_files__(self.path) + + return + + #@profiling.timefn + def __list_files__(self, path, callid=0): + ''' + @Description: + Lists all files in the directory with the matching + regular expression pattern. + + @Input: + self: instance of UioFiles + Description see class documentation. + + path: string + Path to the files. + + callid: integer + Id which tells the function if its the first call + or a recursive call. Default and first call is 0. + Everything different from 0 is ment to be a recursive case. + + @Return: + <nothing> + ''' + + # initialize variable in first function call + if callid == 0: + self.files = [] + + # Get the absolute path + path = os.path.abspath(path) + + # get the file list of the path if its not a directory and + # if it contains the pattern + self.files.extend([os.path.join(path, k) for k in os.listdir(path) + if fnmatch.fnmatch(k, self.pattern)]) + + # find possible sub-directories in the path + subdirs = [s for s in os.listdir(path) + if os.path.isdir(os.path.join(path, s))] + + # do recursive calls for sub-direcorties + if subdirs: + for subdir in subdirs: + self.__list_files__(os.path.join(path, subdir), callid=1) + + return + + def __str__(self): + ''' + @Description: + Converts the list of files into a single string. + The entries are sepereated by "," sign. + + @Input: + self: instance of UioFiles + Description see class documentation. + + @Return: + files_string: string + The content of the list as a single string. + ''' + + filenames = [os.path.basename(f) for f in self.files] + files_string = get_list_as_string(filenames, concatenate_sign=', ') + + return files_string + + def delete_files(self): + ''' + @Description: + Deletes the files. + + @Input: + self: instance of UioFiles + Description see class documentation. 
+ + @Return: + <nothing> + ''' + + for old_file in self.files: + silent_remove(old_file) + + return diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/_config.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..4c5751cc1b98855fb1fa9e7b76c7d4b82c2b6983 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/_config.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: August 2018 +# +# @Change History: +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Description: +# Contains constant value parameter for flex_extract. +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import inspect + +_VERSION_STR = '7.1' + +# ------------------------------------------------------------------------------ +# EXPLICIT FILENAMES +# ------------------------------------------------------------------------------ + +FLEXEXTRACT_DIRNAME = 'flex_extract_v' + _VERSION_STR +FILE_MARS_REQUESTS = 'mars_requests.dat' +FORTRAN_EXECUTABLE = 'CONVERT2' +FILE_USER_ENVVARS = 'ECMWF_ENV' +TEMPFILE_INSTALL_COMPILEJOB = 'compilejob.temp' +FILE_INSTALL_COMPILEJOB = 'compilejob.ksh' +TEMPFILE_INSTALL_JOB = 'job.temp.o' +TEMPFILE_JOB = 'job.temp' +FILE_JOB_OD = 'job.ksh' +FILE_JOB_OP = 'jopoper.ksh' + +# ------------------------------------------------------------------------------ +# EXPLICIT PATHES +# ------------------------------------------------------------------------------ + +# add path to pythonpath +PATH_LOCAL_PYTHON = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if PATH_LOCAL_PYTHON not in sys.path: + sys.path.append(PATH_LOCAL_PYTHON) + +PATH_FLEXEXTRACT_DIR = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + '/../') + +PATH_RELATIVE_PYTHON = os.path.relpath(PATH_LOCAL_PYTHON, PATH_FLEXEXTRACT_DIR) + +PATH_TEMPLATES = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + '_templates') + +PATH_RELATIVE_TEMPLATES = os.path.relpath(PATH_TEMPLATES, PATH_FLEXEXTRACT_DIR) + +# path to gribtable +PATH_GRIBTABLE = os.path.join(PATH_TEMPLATES + os.path.sep + + 'ecmwf_grib1_table_128') + +# path to run directory +PATH_RUN_DIR = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + 'run') + +# path to directory where all control files are stored +PATH_CONTROLFILES = os.path.join(PATH_RUN_DIR + os.path.sep + + 'control') + +# path to directory where all control files are stored +PATH_JOBSCRIPTS = os.path.join(PATH_RUN_DIR + os.path.sep + + 'jobscripts') + +PATH_FORTRAN_SRC = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + 'src') + +PATH_RELATIVE_FORTRAN_SRC = os.path.relpath(PATH_FORTRAN_SRC, PATH_FLEXEXTRACT_DIR) + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/disaggregation.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/disaggregation.py new file mode 100644 index 
0000000000000000000000000000000000000000..aa84eaf0c28110f6772ff3e7a48fd411082d52a4 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/disaggregation.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: March 2018 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - migration of the methods dapoly and darain from Fortran +# (flex_extract_v6 and earlier) to Python +# +# April 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added structured documentation +# - outsourced the disaggregation functions dapoly and darain +# to a new module named disaggregation +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Module Description: +# disaggregation of deaccumulated flux data from an ECMWF model FG field. +# Initially the flux data to be concerned are: +# - large-scale precipitation +# - convective precipitation +# - surface sensible heat flux +# - surface solar radiation +# - u stress +# - v stress +# Different versions of disaggregation is provided for rainfall +# data (darain, modified linear) and the surface fluxes and +# stress data (dapoly, cubic polynomial). +# +# @Module Content: +# - dapoly +# - darain +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ +def dapoly(alist): + ''' + @Author: P. JAMES + + @Date: 2000-03-29 + + @ChangeHistory: + June 2003 - A. BECK (2003-06-01) + adaptaions + November 2015 - Leopold Haimberger (University of Vienna) + migration from Fortran to Python + + @Description: + Interpolation of deaccumulated fluxes of an ECMWF model FG field + using a cubic polynomial solution which conserves the integrals + of the fluxes within each timespan. + disaggregationregation is done for 4 accumluated timespans which generates + a new, disaggregated value which is output at the central point + of the 4 accumulation timespans. This new point is used for linear + interpolation of the complete timeseries afterwards. + + @Input: + alist: list of size 4, array(2D), type=float + List of 4 timespans as 2-dimensional, horizontal fields. + E.g. [[array_t1], [array_t2], [array_t3], [array_t4]] + + @Return: + nfield: array(2D), type=float + New field which replaces the field at the second position + of the accumulation timespans. + + ''' + pya = (alist[3] - alist[0] + 3. * (alist[1] - alist[2])) / 6. + pyb = (alist[2] + alist[0]) / 2. - alist[1] - 9. * pya / 2. + pyc = alist[1] - alist[0] - 7. * pya / 2. - 2. * pyb + pyd = alist[0] - pya / 4. - pyb / 3. - pyc / 2. + nfield = 8. * pya + 4. * pyb + 2. * pyc + pyd + + return nfield + + +def darain(alist): + ''' + @Author: P. JAMES + + @Date: 2000-03-29 + + @ChangeHistory: + June 2003 - A. 
BECK (2003-06-01) + adaptaions + November 2015 - Leopold Haimberger (University of Vienna) + migration from Fortran to Python + + @Description: + Interpolation of deaccumulated fluxes of an ECMWF model FG rainfall + field using a modified linear solution which conserves the integrals + of the fluxes within each timespan. + disaggregationregation is done for 4 accumluated timespans which generates + a new, disaggregated value which is output at the central point + of the 4 accumulation timespans. This new point is used for linear + interpolation of the complete timeseries afterwards. + + @Input: + alist: list of size 4, array(2D), type=float + List of 4 timespans as 2-dimensional, horizontal fields. + E.g. [[array_t1], [array_t2], [array_t3], [array_t4]] + + @Return: + nfield: array(2D), type=float + New field which replaces the field at the second position + of the accumulation timespans. + ''' + xa = alist[0] + xb = alist[1] + xc = alist[2] + xd = alist[3] + xa[xa < 0.] = 0. + xb[xb < 0.] = 0. + xc[xc < 0.] = 0. + xd[xd < 0.] = 0. + + xac = 0.5 * xb + mask = xa + xc > 0. + xac[mask] = xb[mask] * xc[mask] / (xa[mask] + xc[mask]) + xbd = 0.5 * xc + mask = xb + xd > 0. + xbd[mask] = xb[mask] * xc[mask] / (xb[mask] + xd[mask]) + nfield = xac + xbd + + return nfield diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/get_mars_data.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/get_mars_data.py new file mode 100755 index 0000000000000000000000000000000000000000..bf8e02f37a06234c23fe4ea81ff88738337a9e43 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/get_mars_data.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - moved the getEIdata program into a function "get_mars_data" +# - moved the AgurmentParser into a seperate function +# - adatpted the function for the use in flex_extract +# - renamed file to get_mars_data +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added structured documentation +# - minor changes in programming style for consistence +# - added function main and moved function calls vom __main__ there +# (necessary for better documentation with docstrings for later +# online documentation) +# - use of UIFiles class for file selection and deletion +# +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program can be used as a module in the whole flex_extract process +# or can be run by itself to just extract MARS data from ECMWF. To do so, +# a couple of necessary parameters has to be passed with the program call. +# See documentation for more details. 
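+#
+# Used as a module, the central entry point is get_mars_data(c), where c
+# is a fully initialised ControlFile instance; a minimal (hypothetical)
+# sketch, with an example CONTROL file name:
+#
+#     from ControlFile import ControlFile
+#     from get_mars_data import get_mars_data
+#     c = ControlFile('CONTROL_EXAMPLE')
+#     # ... assign command line and ECMWF_ENV parameters as in main() ...
+#     get_mars_data(c)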
+# +# @Program Content: +# - main +# - get_mars_data +# - do_retrievement +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import datetime +import inspect +try: + ecapi = True + import ecmwfapi +except ImportError: + ecapi = False + +# software specific classes and modules from flex_extract +from tools import my_error, normal_exit, get_cmdline_arguments, read_ecenv +from EcFlexpart import EcFlexpart +from UioFiles import UioFiles + +# add path to pythonpath so that python finds its buddies +LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if LOCAL_PYTHON_PATH not in sys.path: + sys.path.append(LOCAL_PYTHON_PATH) + +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + If get_mars_data is called from command line, this function controls + the program flow and calls the argumentparser function and + the get_mars_data function for retrieving EC data. + + @Input: + <nothing> + + @Return: + <nothing> + ''' + + args = get_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + c.assign_args_to_control(args, env_parameter) + c.assign_envs_to_control(env_parameter) + c.check_conditions() + + get_mars_data(c) + normal_exit(c.mailfail, 'Done!') + + return + +def get_mars_data(c): + ''' + @Description: + Retrieves the EC data needed for a FLEXPART simulation. + Start and end dates for retrieval period is set. Retrievals + are divided into smaller periods if necessary and datechunk parameter + is set. + + @Input: + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. 
+
+    @Return:
+        <nothing>
+    '''
+
+    if not os.path.exists(c.inputdir):
+        os.makedirs(c.inputdir)
+
+    if c.request == 0 or c.request == 2:
+        print("Retrieving EC data!")
+    elif c.request == 1:
+        print("Printing mars requests!")
+
+    print("start date %s " % (c.start_date))
+    print("end date %s " % (c.end_date))
+
+    if ecapi:
+        server = ecmwfapi.ECMWFService("mars")
+    else:
+        server = False
+
+    c.ecapi = ecapi
+    print('Using ECMWF WebAPI: ' + str(c.ecapi))
+
+    # basetime runs backwards in time
+
+    # if basetime 00
+    # then data is retrieved from 12 UTC of the previous day up to 00 UTC
+    # of the start day, but without 12 itself, only 12 + step
+
+    # if basetime 12
+    # then data is retrieved from 00 + step up to 12 UTC of the start day
+
+    # a pure forecast is determined forwards in time.
+    # pure forecast mode applies when steps are longer than 24 hours
+    # how else can this be detected ????
+    # only type FC and steps of more than 24 ?
+    # the only problem with a pure forecast is the naming of the files!
+    # i.e. as soon as the forecast overlaps several days
+    # however, this is relevant for and visible in the NON-FLUX data
+
+
+    # set start date of retrieval period
+    start = datetime.date(year=int(c.start_date[:4]),
+                          month=int(c.start_date[4:6]),
+                          day=int(c.start_date[6:]))
+    startm1 = start - datetime.timedelta(days=1)
+
+    # set end date of retrieval period
+    end = datetime.date(year=int(c.end_date[:4]),
+                        month=int(c.end_date[4:6]),
+                        day=int(c.end_date[6:]))
+
+    # set time period for one single retrieval
+    datechunk = datetime.timedelta(days=int(c.date_chunk))
+
+    if c.basetime == '00':
+        start = startm1
+
+    if c.basetime == '00' or c.basetime == '12':
+        # endp1 = end + datetime.timedelta(days=1)
+        endp1 = end
+    else:
+        # endp1 = end + datetime.timedelta(days=2)
+        endp1 = end + datetime.timedelta(days=1)
+
+    # -------------- flux data ------------------------------------------------
+    if c.request == 0 or c.request == 2:
+        print('... removing old flux content of ' + c.inputdir)
+        tobecleaned = UioFiles(c.inputdir,
+                               '*_acc_*.' + str(os.getppid()) + '.*.grb')
+        tobecleaned.delete_files()
+
+    # if forecasts for at most one day (up to 24h) are to be retrieved,
+    # collect accumulation data (flux data)
+    # with additional days in the beginning and at the end
+    # (used for complete disaggregation of original period)
+    if c.maxstep <= 24:
+        do_retrievement(c, server, startm1, endp1, datechunk, fluxes=True)
+
+    # if forecast data longer than 24h is to be retrieved,
+    # collect accumulation data (flux data)
+    # with the exact start and end date
+    # (disaggregation will be done for the
+    # exact time period with boundary conditions)
+    else:
+        do_retrievement(c, server, start, end, datechunk, fluxes=True)
+
+    # -------------- non flux data --------------------------------------------
+    if c.request == 0 or c.request == 2:
+        print('... removing old non flux content of ' + c.inputdir)
+        tobecleaned = UioFiles(c.inputdir,
+                               '*__*.' + str(os.getppid()) + '.*.grb')
+        tobecleaned.delete_files()
+
+    do_retrievement(c, server, start, end, datechunk, fluxes=False)
+
+    return
+
+def do_retrievement(c, server, start, end, delta_t, fluxes=False):
+    '''
+    @Description:
+        Divides the complete retrieval period into smaller chunks and
+        retrieves the data from MARS.
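+
+        For example (hypothetical dates): with date_chunk=3, a period
+        from 20160901 to 20160907 would be split into the three MARS
+        requests 20160901/to/20160903, 20160904/to/20160906 and
+        20160907/to/20160907.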
+ + @Input: + c: instance of ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + server: instance of ECMWFService + The server connection to ECMWF + + start: instance of datetime + The start date of the retrieval. + + end: instance of datetime + The end date of the retrieval. + + delta_t: instance of datetime + Delta_t +1 is the maximal time period of a single + retrieval. + + fluxes: boolean, optional + Decides if the flux parameters are to be retrieved or + the rest of the parameter list. + Default value is False. + + @Return: + <nothing> + ''' + + # since actual day also counts as one day, + # we only need to add datechunk - 1 days to retrieval + # for a period + delta_t_m1 = delta_t - datetime.timedelta(days=1) + + day = start + while day <= end: + flexpart = EcFlexpart(c, fluxes) + tmpday = day + delta_t_m1 + if tmpday < end: + dates = day.strftime("%Y%m%d") + "/to/" + \ + tmpday.strftime("%Y%m%d") + else: + dates = day.strftime("%Y%m%d") + "/to/" + \ + end.strftime("%Y%m%d") + + + print("... retrieve " + dates + " in dir " + c.inputdir) + + try: + flexpart.retrieve(server, dates, c.request, c.inputdir) + except IOError: + my_error(c.mailfail, 'MARS request failed') + + day += delta_t + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/install.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/install.py new file mode 100755 index 0000000000000000000000000000000000000000..ba994276e57bb367c2252c83cb9819bb0bb9e1c5 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/install.py @@ -0,0 +1,546 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - moved install_args_and_control in here +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# Depending on the selected installation environment (locally or on the +# ECMWF server ecgate or cca) the program extracts the commandline +# arguments and the CONTROL file parameter and prepares the corresponding +# environment. The necessary files are collected in a tar-ball and placed +# at the target location. There its untared, the environment variables will +# be set and the Fortran code will be compiled. If the ECMWF environment is +# selected a job script is prepared and submitted for the remaining +# configurations after putting the tar-ball to the target ECMWF server. 
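+#
+#    Example calls (a sketch; the uid/gid/gateway/destination values are
+#    placeholders, the option names are those defined in
+#    get_install_cmdline_arguments below):
+#        python install.py --target=local --makefile=Makefile.gfortran
+#        python install.py --target=ecgate --ecuid=<uid> --ecgid=<gid> \
+#            --gateway=<gateway> --destination=<uid>@genericSftp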
+# +# @Program Content: +# - main +# - get_install_cmdline_arguments +# - install_via_gateway +# - mk_tarball +# - un_tarball +# - mk_env_vars +# - mk_compilejob +# - mk_job_template +# - delete_convert_build +# - make_convert_build +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import glob +import subprocess +import inspect +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +# software specific classes and modules from flex_extract +import _config +from ControlFile import ControlFile +from UioFiles import UioFiles +from tools import make_dir, put_file_to_ecserver, submit_job_to_ecserver + + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + Controls the installation process. Calls the installation function + if target is specified. + + @Intput: + <nothing> + + @Return: + <nothing> + ''' + + os.chdir(_config.PATH_LOCAL_PYTHON) + args = get_install_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + exit(1) + + c.assign_args_to_control(args) + c.check_install_conditions() + + install_via_gateway(c) + + return + +def get_install_cmdline_arguments(): + ''' + @Description: + Decomposes the command line arguments and assigns them to variables. + Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + ''' + parser = ArgumentParser(description='Install flex_extract software locally or \ + on ECMWF machines', + formatter_class=ArgumentDefaultsHelpFormatter) + + parser.add_argument('--target', dest='install_target', default=None, + help="Valid targets: local | ecgate | cca , \ + the latter two are at ECMWF") + parser.add_argument("--makefile", dest="makefile", default=None, + help='Name of Makefile to use for compiling CONVERT2') + parser.add_argument("--ecuid", dest="ecuid", default=None, + help='user id at ECMWF') + parser.add_argument("--ecgid", dest="ecgid", default=None, + help='group id at ECMWF') + parser.add_argument("--gateway", dest="gateway", default=None, + help='name of local gateway server') + parser.add_argument("--destination", dest="destination", default=None, + help='ecaccess destination, e.g. 
leo@genericSftp') + + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + default=None, help="FLEXPART root directory on ECMWF \ + servers (to find grib2flexpart and COMMAND file)\n\ + Normally flex_extract resides in the scripts directory \ + of the FLEXPART distribution, thus the:") + + # arguments for job submission to ECMWF, only needed by submit.py + parser.add_argument("--job_template", dest='job_template', + default="job.temp.o", + help="job template file for submission to ECMWF") + + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + + args = parser.parse_args() + + return args + + +def install_via_gateway(c): + ''' + @Description: + Perform the actual installation on local machine or prepare data + transfer to remote gate and submit a job script which will + install everything on the remote gate. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR + For more information about format and content of the parameter see + documentation. + + @Return: + <nothing> + ''' + import tarfile + + ecd = _config.PATH_FLEXEXTRACT_DIR + tarball_name = _config.FLEXEXTRACT_DIRNAME + '.tar' + tar_file = os.path.join(ecd + os.path.sep + tarball_name) + + target_dirname = _config.FLEXEXTRACT_DIRNAME + fortran_executable = _config.FORTRAN_EXECUTABLE + + if c.install_target.lower() != 'local': # ecgate or cca + + mk_compilejob(c.makefile, c.install_target, c.ecuid, c.ecgid, + c.flexpart_root_scripts) + + mk_job_template(c.ecuid, c.ecgid, c.gateway, + c.destination, c.flexpart_root_scripts) + + mk_env_vars(c.ecuid, c.ecgid, c.gateway, c.destination) + + mk_tarball(tar_file) + + put_file_to_ecserver(ecd, tarball_name, c.install_target, + c.ecuid, c.ecgid) + + submit_job_to_ecserver(c.install_target, + os.path.join(_config.PATH_JOBSCRIPTS + + os.path.sep + + _config.FILE_INSTALL_COMPILEJOB)) + + print('job compilation script has been submitted to ecgate for ' + + 'installation in ' + c.flexpart_root_scripts + + '/' + target_dirname) + print('You should get an email with subject "flexcompile" within ' + + 'the next few minutes!') + + else: #local + if not c.flexpart_root_scripts or c.flexpart_root_scripts == '../': + #install_dir = c.flexpart_root_scripts + print('WARNING: FLEXPART_ROOT_SCRIPTS has not been specified') + print('There will be only the compilation of the Fortran program' + + ' in ' + _config.PATH_FORTRAN_SRC) + os.chdir(_config.PATH_FORTRAN_SRC) + else: # creates the target working directory for flex_extract + c.flexpart_root_scripts = os.path.expandvars(os.path.expanduser( + c.flexpart_root_scripts)) + if os.path.abspath(ecd) != os.path.abspath(c.flexpart_root_scripts): + mk_tarball(tar_file) + make_dir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname)) + os.chdir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname)) + un_tarball(tar_file) + os.chdir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname + os.path.sep + + _config.PATH_RELATIVE_FORTRAN_SRC)) + + # Create Fortran executable - CONVERT2 + print('Install ' + target_dirname + ' software at ' + + 
c.install_target + ' in directory ' + + os.path.abspath(c.flexpart_root_scripts) + '\n') + + delete_convert_build('.') + make_convert_build('.', c.makefile) + + os.chdir(ecd) + if os.path.isfile(tar_file): + os.remove(tar_file) + + return + +def mk_tarball(tarball_path): + ''' + @Description: + Creates a tarball with all necessary files which need to be sent to the + installation directory. + It does not matter if this is local or remote. + Collects all python files, the Fortran source and makefiles, + the ECMWF_ENV file, the CONTROL files as well as the + template files. + + @Input: + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. + + @Return: + <nothing> + ''' + import tarfile + from glob import glob + + print('Create tarball ...') + + # change to FLEXEXTRACT directory so that the tar can contain + # relative pathes to the files and directories + ecd = _config.PATH_FLEXEXTRACT_DIR + '/' + os.chdir(ecd) + + # get lists of the files to be added to the tar file + ECMWF_ENV_FILE = [os.path.join(_config.PATH_RELATIVE_PYTHON + + os.path.sep + _config.FILE_USER_ENVVARS)] + pyfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_LOCAL_PYTHON + + os.path.sep + '*py')] + controlfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_CONTROLFILES + + os.path.sep + 'CONTROL*')] + tempfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_TEMPLATES)] + ffiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.f*')] + hfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.h')] + makefiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + 'Makefile*')] + + # concatenate single lists to one for a better looping + filelist = pyfiles + controlfiles + tempfiles + ffiles + hfiles + \ + makefiles + ECMWF_ENV_FILE + + # create installation tar-file + try: + with tarfile.open(tarball_path, "w:gz") as tar_handle: + for file in filelist: + tar_handle.add(file) + + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + sys.exit('... could not make installation tar ball!') + + return + + +def un_tarball(tarball_path): + ''' + @Description: + Extracts the given tarball into current directory. + + @Input: + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. + + @Return: + <nothing> + ''' + import tarfile + + print('Untar ...') + + with tarfile.open(tarball_path) as tar_handle: + tar_handle.extractall() + + return + +def mk_env_vars(ecuid, ecgid, gateway, destination): + ''' + @Description: + Creates a file named ECMWF_ENV which contains the + necessary environmental variables at ECMWF servers. + + @Input: + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + gateway: string + The gateway server the user is using. + + destination: string + The remote destination which is used to transfer files + from ECMWF server to local gateway server. 
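+
+        The resulting ECMWF_ENV file consists of one 'KEY value' pair per
+        line, e.g. (with placeholder values):
+            ECUID <user_id>
+            ECGID <group_id>
+            GATEWAY <gateway_server>
+            DESTINATION <user_id>@genericSftp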
+ + @Return: + <nothing> + ''' + + with open(os.path.join(_config.PATH_LOCAL_PYTHON + os.path.sep + + _config.FILE_USER_ENVVARS), 'w') as fo: + fo.write('ECUID ' + ecuid + '\n') + fo.write('ECGID ' + ecgid + '\n') + fo.write('GATEWAY ' + gateway + '\n') + fo.write('DESTINATION ' + destination + '\n') + + return + +def mk_compilejob(makefile, target, ecuid, ecgid, fp_root): + ''' + @Description: + Modifies the original job template file so that it is specified + for the user and the environment were it will be applied. Result + is stored in a new file "job.temp" in the python directory. + + @Input: + makefile: string + Name of the makefile which should be used to compile FORTRAN + CONVERT2 program. + + target: string + The target where the installation should be done, e.g. the queue. + + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + fp_root: string + Path to the root directory of FLEXPART environment or flex_extract + environment. + + @Return: + <nothing> + ''' + + template = os.path.join(_config.PATH_TEMPLATES + os.path.sep + + _config.TEMPFILE_INSTALL_COMPILEJOB) + with open(template) as f: + fdata = f.read().split('\n') + + compilejob = os.path.join(_config.PATH_JOBSCRIPTS + os.path.sep + + _config.FILE_INSTALL_COMPILEJOB) + with open(compilejob, 'w') as fo: + for data in fdata: + if 'MAKEFILE=' in data: + data = 'export MAKEFILE=' + makefile + elif 'FLEXPART_ROOT_SCRIPTS=' in data: + if fp_root != '../': + data = 'export FLEXPART_ROOT_SCRIPTS=' + fp_root + else: + data = 'export FLEXPART_ROOT_SCRIPTS=$HOME' + elif target.lower() != 'local': + if '--workdir' in data: + data = '#SBATCH --workdir=/scratch/ms/' + \ + ecgid + '/' + ecuid + elif '##PBS -o' in data: + data = '##PBS -o /scratch/ms/' + ecgid + '/' + ecuid + \ + 'flex_ecmwf.$Jobname.$Job_ID.out' + elif 'FLEXPART_ROOT_SCRIPTS=' in data: + if fp_root != '../': + data = 'export FLEXPART_ROOT_SCRIPTS=' + fp_root + else: + data = 'export FLEXPART_ROOT_SCRIPTS=$HOME' + fo.write(data + '\n') + + return + +def mk_job_template(ecuid, ecgid, gateway, destination, fp_root): + ''' + @Description: + Modifies the original job template file so that it is specified + for the user and the environment were it will be applied. Result + is stored in a new file "job.temp" in the python directory. + + @Input: + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + gateway: string + The gateway server the user is using. + + destination: string + The remote destination which is used to transfer files + from ECMWF server to local gateway server. + + fp_root: string + Path to the root directory of FLEXPART environment or flex_extract + environment. 
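+
+        Only the template lines containing '--workdir', '##PBS -o' or
+        'export PATH=${PATH}:' are modified, e.g. the workdir line becomes
+        (with placeholder ids):
+            #SBATCH --workdir=/scratch/ms/<ecgid>/<ecuid>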
+ + @Return: + <nothing> + ''' + ec_python_rel_path = _config.FLEXEXTRACT_DIRNAME + '/' + \ + _config.PATH_RELATIVE_PYTHON + + template = os.path.join(_config.PATH_TEMPLATES + os.path.sep + + _config.TEMPFILE_INSTALL_JOB) + with open(template) as f: + fdata = f.read().split('\n') + + jobfile_temp = os.path.join(_config.PATH_JOBSCRIPTS + os.path.sep + + _config.TEMPFILE_JOB) + with open(jobfile_temp, 'w') as fo: + for data in fdata: + if '--workdir' in data: + data = '#SBATCH --workdir=/scratch/ms/' + ecgid + \ + '/' + ecuid + elif '##PBS -o' in data: + data = '##PBS -o /scratch/ms/' + ecgid + '/' + \ + ecuid + 'flex_ecmwf.$Jobname.$Job_ID.out' + elif 'export PATH=${PATH}:' in data: + data += fp_root + '/' + ec_python_rel_path + + fo.write(data + '\n') + return + +def delete_convert_build(src_path): + ''' + @Description: + Clean up the Fortran source directory and remove all + build files (e.g. *.o, *.mod and CONVERT2) + + @Input: + src_path: string + Path to the fortran source directory. + + @Return: + <nothing> + ''' + + modfiles = UioFiles(src_path, '*.mod') + objfiles = UioFiles(src_path, '*.o') + exefile = UioFiles(src_path, _config.FORTRAN_EXECUTABLE) + + modfiles.delete_files() + objfiles.delete_files() + exefile.delete_files() + + return + +def make_convert_build(src_path, makefile): + ''' + @Description: + Compiles the Fortran code and generates the executable. + + @Input: + src_path: string + Path to the fortran source directory. + + makefile: string + The name of the makefile which should be used. + + @Return: + <nothing> + ''' + + try: + print('Using makefile: ' + makefile) + p = subprocess.Popen(['make', '-f', + os.path.join(src_path + os.path.sep + makefile)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + pout, perr = p.communicate() + print(pout) + if p.returncode != 0: + print(perr) + print('Please edit ' + makefile + + ' or try another Makefile in the src directory.') + print('Most likely GRIB_API_INCLUDE_DIR, GRIB_API_LIB ' + 'and EMOSLIB must be adapted.') + print('Available Makefiles:') + print(UioFiles(src_path, 'Makefile*')) + sys.exit('Compilation failed!') + except ValueError as e: + print('ERROR: Makefile call failed:') + print(e) + else: + subprocess.check_call(['ls', '-l', + os.path.join(src_path + os.path.sep + + _config.FORTRAN_EXECUTABLE)]) + + return + + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/plot_retrieved.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/plot_retrieved.py new file mode 100755 index 0000000000000000000000000000000000000000..45e7bb2e7783cfe0644b1fe2eb4c296d1bf75fe2 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/plot_retrieved.py @@ -0,0 +1,675 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - created function main and moved the two function calls for +# arguments and plotting into it +# - added function get_basics to extract the boundary conditions +# of the data fields from the first grib file it gets. +# +# @License: +# (C) Copyright 2015-2018. 
+# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# Simple tool for creating maps and time series of retrieved fields. +# +# @Program Content: +# - main +# - get_basics +# - get_files_per_date +# - plot_retrieved +# - plot_timeseries +# - plot_map +# - get_plot_args +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import time +import datetime +import os +import inspect +import sys +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +import matplotlib +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap +from eccodes import codes_grib_new_from_file, codes_get, codes_release, \ + codes_get_values +import numpy as np + +# software specific classes and modules from flex_extract +from ControlFile import ControlFile +from UioFiles import UioFiles + +# add path to pythonpath so that python finds its buddies +LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if LOCAL_PYTHON_PATH not in sys.path: + sys.path.append(LOCAL_PYTHON_PATH) + +font = {'family': 'monospace', 'size': 12} +matplotlib.rcParams['xtick.major.pad'] = '20' +matplotlib.rc('font', **font) +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + If plot_retrieved is called from command line, this function controls + the program flow and calls the argumentparser function and + the plot_retrieved function for plotting the retrieved GRIB data. + + @Input: + <nothing> + + @Return: + <nothing> + ''' + args, c = get_plot_args() + plot_retrieved(c) + + return + +def get_basics(ifile, verb=False): + """ + @Description: + An example grib file will be opened and basic information will + be extracted. These information are important for later use and the + initialization of numpy arrays for data storing. + + @Input: + ifile: string + Contains the full absolute path to the ECMWF grib file. + + verb (opt): bool + Is True if there should be extra output in verbose mode. + Default value is False. + + @Return: + data: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + """ + + data = {} + + # --- open file --- + print("Opening file for getting information data --- %s" % + os.path.basename(ifile)) + + with open(ifile) as f: + + # load first message from file + gid = codes_grib_new_from_file(f) + + # information needed from grib message + keys = ['Ni', + 'Nj', + 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees'] + + if verb: + print '\nInformations are: ' + for key in keys: + # Get the value of the key in a grib message. 
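+            # (codes_get returns the value in the native type of the key)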
+ data[key] = codes_get(gid, key) + if verb: + print "%s = %s" % (key, data[key]) + if verb: + print '\n' + + # Free the memory for the message referred as gribid. + codes_release(gid) + + return data + +def get_files_per_date(files, datelist): + ''' + @Description: + The filenames contain dates which are used to select a list + of files for a specific time period specified in datelist. + + @Input: + files: instance of UioFiles + For description see class documentation. + It contains the attribute "files" which is a list of pathes + to filenames. + + datelist: list of datetimes + Contains the list of dates which should be processed for plotting. + + @Return: + filelist: list of strings + Contains the selected files for the time period. + ''' + + filelist = [] + for filename in files: + filedate = filename[-8:] + ddate = datetime.datetime.strptime(filedate, '%y%m%d%H') + if ddate in datelist: + filelist.append(filename) + + return filelist + +def plot_retrieved(c): + ''' + @Description: + Reads GRIB data from a specified time period, a list of levels + and a specified list of parameter. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + @Return: + <nothing> + ''' + start = datetime.datetime.strptime(c.start_date, '%Y%m%d%H') + end = datetime.datetime.strptime(c.end_date, '%Y%m%d%H') + + # create datelist between start and end date + datelist = [start] # initialise datelist with first date + run_date = start + while run_date < end: + run_date += datetime.timedelta(hours=int(c.dtime)) + datelist.append(run_date) + + print 'datelist: ', datelist + + c.paramIds = np.asarray(c.paramIds, dtype='int') + c.levels = np.asarray(c.levels, dtype='int') + c.area = np.asarray(c.area) + + files = UioFiles(c.inputdir, c.prefix+'*') + ifiles = get_files_per_date(files.files, datelist) + ifiles.sort() + + gdict = get_basics(ifiles[0], verb=False) + + fdict = dict() + fmeta = dict() + fstamp = dict() + for p in c.paramIds: + for l in c.levels: + key = '{:0>3}_{:0>3}'.format(p, l) + fdict[key] = [] + fmeta[key] = [] + fstamp[key] = [] + + for filename in ifiles: + f = open(filename) + print "Opening file for reading data --- %s" % filename + fdate = datetime.datetime.strptime(filename[-8:], "%y%m%d%H") + + # Load in memory a grib message from a file. 
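+        # (codes_grib_new_from_file returns None at the end of file,
+        #  which terminates the while loop below)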
+ gid = codes_grib_new_from_file(f) + while gid is not None: + gtype = codes_get(gid, 'type') + paramId = codes_get(gid, 'paramId') + parameterName = codes_get(gid, 'parameterName') + level = codes_get(gid, 'level') + + if paramId in c.paramIds and level in c.levels: + key = '{:0>3}_{:0>3}'.format(paramId, level) + print 'key: ', key + if fstamp[key]: + for i in range(len(fstamp[key])): + if fdate < fstamp[key][i]: + fstamp[key].insert(i, fdate) + fmeta[key].insert(i, [paramId, parameterName, gtype, + fdate, level]) + fdict[key].insert(i, np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + elif fdate > fstamp[key][i] and i == len(fstamp[key])-1: + fstamp[key].append(fdate) + fmeta[key].append([paramId, parameterName, gtype, + fdate, level]) + fdict[key].append(np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + elif fdate > fstamp[key][i] and i != len(fstamp[key])-1 \ + and fdate < fstamp[key][i+1]: + fstamp[key].insert(i, fdate) + fmeta[key].insert(i, [paramId, parameterName, gtype, + fdate, level]) + fdict[key].insert(i, np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + else: + pass + else: + fstamp[key].append(fdate) + fmeta[key].append((paramId, parameterName, gtype, + fdate, level)) + fdict[key].append(np.flipud(np.reshape( + codes_get_values(gid), [gdict['Nj'], gdict['Ni']]))) + + codes_release(gid) + + # Load in memory a grib message from a file. + gid = codes_grib_new_from_file(f) + + f.close() + + for k in fdict.iterkeys(): + print 'fmeta: ', len(fmeta), fmeta + fml = fmeta[k] + fdl = fdict[k] + print 'fm1: ', len(fml), fml + for fd, fm in zip(fdl, fml): + print fm + ftitle = fm[1] + ' {} '.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + pname = '_'.join(fm[1].split()) + '_{}_'.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + plot_map(c, fd, fm, gdict, ftitle, pname, 'png') + + for k in fdict.iterkeys(): + fml = fmeta[k] + fdl = fdict[k] + fsl = fstamp[k] + if fdl: + fm = fml[0] + fd = fdl[0] + ftitle = fm[1] + ' {} '.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + pname = '_'.join(fm[1].split()) + '_{}_'.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + lat = -20. + lon = 20. + plot_timeseries(c, fdl, fml, fsl, lat, lon, gdict, + ftitle, pname, 'png') + + return + +def plot_timeseries(c, flist, fmetalist, ftimestamps, lat, lon, + gdict, ftitle, filename, fending, show=False): + ''' + @Description: + Creates a timeseries plot for a given lat/lon position. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + flist: numpy array, 2d + The actual data values to be plotted from the grib messages. + + fmetalist: list of strings + Contains some meta date for the data field to be plotted: + parameter id, parameter Name, grid type, datetime, level + + ftimestamps: list of datetime + Contains the time stamps. + + lat: float + The latitude for which the timeseries should be plotted. 
+ + lon: float + The longitude for which the timeseries should be plotted. + + gdict: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + + ftitle: string + The title of the timeseries. + + filename: string + The time series is stored in a file with this name. + + fending: string + Contains the type of plot, e.g. pdf or png + + show: boolean + Decides if the plot is shown after plotting or hidden. + + @Return: + <nothing> + ''' + print 'plotting timeseries' + + t1 = time.time() + + #llx = gdict['longitudeOfFirstGridPointInDegrees'] + #if llx > 180. : + # llx -= 360. + #lly = gdict['latitudeOfLastGridPointInDegrees'] + #dxout = gdict['iDirectionIncrementInDegrees'] + #dyout = gdict['jDirectionIncrementInDegrees'] + #urx = gdict['longitudeOfLastGridPointInDegrees'] + #ury = gdict['latitudeOfFirstGridPointInDegrees'] + #numxgrid = gdict['Ni'] + #numygrid = gdict['Nj'] + + farr = np.asarray(flist) + #(time, lat, lon) + + #lonindex = linspace(llx, urx, numxgrid) + #latindex = linspace(lly, ury, numygrid) + + ts = farr[:, 0, 0] + + fig = plt.figure(figsize=(12, 6.7)) + + plt.plot(ftimestamps, ts) + plt.title(ftitle) + + plt.savefig(c.outputdir + '/' + filename + '_TS.' + fending, + facecolor=fig.get_facecolor(), + edgecolor='none', + format=fending) + print 'created ', c.outputdir + '/' + filename + if show: + plt.show() + fig.clf() + plt.close(fig) + + print time.time() - t1, 's' + + return + +def plot_map(c, flist, fmetalist, gdict, ftitle, filename, fending, show=False): + ''' + @Description: + Creates a basemap plot with imshow for a given data field. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + flist: numpy array, 2d + The actual data values to be plotted from the grib messages. + + fmetalist: list of strings + Contains some meta date for the data field to be plotted: + parameter id, parameter Name, grid type, datetime, level + + gdict: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + + ftitle: string + The titel of the plot. + + filename: string + The plot is stored in a file with this name. + + fending: string + Contains the type of plot, e.g. pdf or png + + show: boolean + Decides if the plot is shown after plotting or hidden. + + @Return: + <nothing> + ''' + print 'plotting map' + + t1 = time.time() + + fig = plt.figure(figsize=(12, 6.7)) + #mbaxes = fig.add_axes([0.05, 0.15, 0.8, 0.7]) + + llx = gdict['longitudeOfFirstGridPointInDegrees'] #- 360 + if llx > 180.: + llx -= 360. 
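+    # (wrap the start longitude from the [0, 360) into the [-180, 180] range)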
+ lly = gdict['latitudeOfLastGridPointInDegrees'] + #dxout = gdict['iDirectionIncrementInDegrees'] + #dyout = gdict['jDirectionIncrementInDegrees'] + urx = gdict['longitudeOfLastGridPointInDegrees'] + ury = gdict['latitudeOfFirstGridPointInDegrees'] + #numxgrid = gdict['Ni'] + #numygrid = gdict['Nj'] + + m = Basemap(projection='cyl', llcrnrlon=llx, llcrnrlat=lly, + urcrnrlon=urx, urcrnrlat=ury, resolution='i') + + #lw = 0.5 + m.drawmapboundary() + #x = linspace(llx, urx, numxgrid) + #y = linspace(lly, ury, numygrid) + + #xx, yy = m(*meshgrid(x, y)) + + #s = m.contourf(xx, yy, flist) + + s = plt.imshow(flist.T, + extent=(llx, urx, lly, ury), + alpha=1.0, + interpolation='nearest' + #vmin=vn, + #vmax=vx, + #cmap=my_cmap, + #levels=levels, + #cmap=my_cmap, + #norm=LogNorm(vn,vx) + ) + + plt.title(ftitle, y=1.08) + cb = m.colorbar(s, location="right", pad="10%") + cb.set_label('label', size=14) + + thickline = np.arange(lly, ury+1, 10.) + thinline = np.arange(lly, ury+1, 5.) + m.drawparallels(thickline, + color='gray', + dashes=[1, 1], + linewidth=0.5, + labels=[1, 1, 1, 1], + xoffset=1.) + m.drawparallels(np.setdiff1d(thinline, thickline), + color='lightgray', + dashes=[1, 1], + linewidth=0.5, + labels=[0, 0, 0, 0]) + + thickline = np.arange(llx, urx+1, 10.) + thinline = np.arange(llx, urx+1, 5.) + m.drawmeridians(thickline, + color='gray', + dashes=[1, 1], + linewidth=0.5, + labels=[1, 1, 1, 1], + yoffset=1.) + m.drawmeridians(np.setdiff1d(thinline, thickline), + color='lightgray', + dashes=[1, 1], + linewidth=0.5, + labels=[0, 0, 0, 0]) + + m.drawcoastlines() + m.drawcountries() + + plt.savefig(c.outputdir + '/' + filename + '_MAP.' + fending, + facecolor=fig.get_facecolor(), + edgecolor='none', + format=fending) + print 'created ', c.outputdir + '/' + filename + if show: + plt.show() + fig.clf() + plt.close(fig) + + print time.time() - t1, 's' + + return + +def get_plot_args(): + ''' + @Description: + Assigns the command line arguments and reads CONTROL file + content. Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + ''' + parser = ArgumentParser(description='Plot retrieved GRIB data from ' + \ + 'ECMWF MARS archive', + formatter_class=ArgumentDefaultsHelpFormatter) + +# the most important arguments + parser.add_argument("--start_date", dest="start_date", + help="start date YYYYMMDD") + parser.add_argument("--end_date", dest="end_date", + help="end_date YYYYMMDD") + + parser.add_argument("--start_step", dest="start_step", + help="start step in hours") + parser.add_argument("--end_step", dest="end_step", + help="end step in hours") + +# some arguments that override the default in the CONTROL file + parser.add_argument("--levelist", dest="levelist", + help="vertical levels to be retrieved, e.g. 
30/to/60") + parser.add_argument("--area", dest="area", + help="area defined as north/west/south/east") + parser.add_argument("--paramIds", dest="paramIds", + help="parameter IDs") + parser.add_argument("--prefix", dest="prefix", default='EN', + help="output file name prefix") + +# set the working directories + parser.add_argument("--inputdir", dest="inputdir", default=None, + help="root directory for storing intermediate files") + parser.add_argument("--outputdir", dest="outputdir", default=None, + help="root directory for storing output files") + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + help="FLEXPART root directory (to find \ + 'grib2flexpart and COMMAND file)\n \ + Normally flex_extract resides in the scripts directory \ + of the FLEXPART distribution") + + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + args = parser.parse_args() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + exit(1) + + if args.levelist: + c.levels = args.levelist.split('/') + else: + c.levels = [0] + + if args.area: + c.area = args.area.split('/') + else: + c.area = '[0,0]' + + c.paramIds = args.paramIds.split('/') + + if args.start_step: + c.start_step = int(args.start_step) + else: + c.start_step = 0 + + if args.end_step: + c.end_step = int(args.end_step) + else: + c.end_step = 0 + + c.start_date = args.start_date + c.end_date = args.end_date + + c.prefix = args.prefix + + c.inputdir = args.inputdir + + if args.outputdir: + c.outputdir = args.outputdir + else: + c.outputdir = c.inputdir + + return args, c + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/prepare_flexpart.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/prepare_flexpart.py new file mode 100755 index 0000000000000000000000000000000000000000..088c2a0fd49e2cc9452d622640b8b0c137b47f07 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/prepare_flexpart.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - using the WebAPI also for general MARS retrievals +# - job submission on ecgate and cca +# - job templates suitable for twice daily operational dissemination +# - dividing retrievals of longer periods into digestable chunks +# - retrieve also longer term forecasts, not only analyses and +# short term forecast data +# - conversion into GRIB2 +# - conversion into .fp format for faster execution of FLEXPART +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - minor changes in programming style for consistence +# - BUG: removed call of clean_up-Function after call of +# prepareFlexpart in main since it is already called in +# prepareFlexpart at the end! +# - created function main and moved the two function calls for +# arguments and prepare_flexpart into it +# +# @License: +# (C) Copyright 2014-2018. 
+# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program prepares the final version of the grib files which are +# then used by FLEXPART. It converts the bunch of grib files extracted +# via get_mars_data by doing for example the necessary conversion to get +# consistent grids or the disaggregation of flux data. Finally, the +# program combines the data fields in files per available hour with the +# naming convention xxYYMMDDHH, where xx should be 2 arbitrary letters +# (mostly xx is chosen to be "EN"). +# +# @Program Content: +# - main +# - prepare_flexpart +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import datetime +import os +import inspect +import sys +import socket +import _config + +# software specific classes and modules from flex_extract +from UioFiles import UioFiles +from tools import clean_up, get_cmdline_arguments, read_ecenv +from EcFlexpart import EcFlexpart + +ecapi = 'ecmwf' not in socket.gethostname() +try: + if ecapi: + import ecmwfapi +except ImportError: + ecapi = False + +# add path to pythonpath so that python finds its buddies +LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if LOCAL_PYTHON_PATH not in sys.path: + sys.path.append(LOCAL_PYTHON_PATH) + + +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + If prepare_flexpart is called from command line, this function controls + the program flow and calls the argumentparser function and + the prepare_flexpart function for preparation of GRIB data for FLEXPART. + + @Input: + <nothing> + + @Return: + <nothing> + ''' + + args = get_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + c.assign_args_to_control(args, env_parameter) + c.assign_envs_to_control(env_parameter) + c.check_conditions() + prepare_flexpart(args.ppid, c) + + return + +def prepare_flexpart(ppid, c): + ''' + @Description: + Lists all grib files retrieved from MARS with get_mars_data and + uses prepares data for the use in FLEXPART. Specific data fields + are converted to a different grid and the flux data are going to be + disaggregated. The data fields are collected by hour and stored in + a file with a specific FLEXPART relevant naming convention. + + @Input: + ppid: int + Contains the ppid number of the current ECMWF job. If it is called + from this script, it is "None". 
+ + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + if not ppid: + c.ppid = str(os.getppid()) + else: + c.ppid = ppid + + c.ecapi = ecapi + + # create the start and end date + start = datetime.date(year=int(c.start_date[:4]), + month=int(c.start_date[4:6]), + day=int(c.start_date[6:])) + + end = datetime.date(year=int(c.end_date[:4]), + month=int(c.end_date[4:6]), + day=int(c.end_date[6:])) + + # assign starting date minus 1 day + # since for basetime 00 we need the 12 hours upfront + # (the day before from 12 UTC to current day 00 UTC) + if c.basetime == '00': + start = start - datetime.timedelta(days=1) + + print 'Prepare ' + start.strftime("%Y%m%d") + \ + "/to/" + end.strftime("%Y%m%d") + + # create output dir if necessary + if not os.path.exists(c.outputdir): + os.makedirs(c.outputdir) + + # get all files with flux data to be deaccumulated + inputfiles = UioFiles(c.inputdir, '*OG_acc_SL*.' + c.ppid + '.*') + + # deaccumulate the flux data + flexpart = EcFlexpart(c, fluxes=True) + flexpart.write_namelist(c, 'fort.4') + flexpart.deacc_fluxes(inputfiles, c) + + # get a list of all files from the root inputdir + inputfiles = UioFiles(c.inputdir, '????__??.*' + c.ppid + '.*') + + # produce FLEXPART-ready GRIB files and process them - + # copy/transfer/interpolate them or make them GRIB2 + flexpart = EcFlexpart(c, fluxes=False) + flexpart.create(inputfiles, c) + flexpart.process_output(c) + + # check if in debugging mode, then store all files + # otherwise delete temporary files + if int(c.debug) != 0: + print '\nTemporary files left intact' + else: + clean_up(c) + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/profiling.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/profiling.py new file mode 100644 index 0000000000000000000000000000000000000000..4511af2aca3a41265a9dd035b11430e84626ac62 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/profiling.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#************************************************************************ +# ToDo AP +# - check license of book content +#************************************************************************ +#******************************************************************************* +# +# @Author: Anne Philipp (University of Vienna) +# +# @Date: March 2018 +# +# @License: +# (C) Copyright 2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program functionality: +# This module is not part of flex_extract. It is just used for testing and +# performance analysis of some functions. 
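+#
+#    Usage sketch (assuming this module is imported as "profiling"):
+#        from profiling import timefn
+#
+#        @timefn
+#        def my_function(*args):
+#            ...   # work to be timed
+#
+#    Each call of my_function then prints a line like
+#    "@timefn:my_function took ... seconds".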
+# +# @Program Content: +# - timefn +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +from functools import wraps +import time + +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def timefn(fn): + ''' + @Description: + Decorator function. It takes the inner function as an argument. + ''' + @wraps(fn) + def measure_time(*args, **kwargs): + ''' + @Descripton: + Passes the arguments through fn for execution. Around the + execution of fn the time is captured to execute the fn function + and prints the result along with the function name. + + This is taken from the book "High Performance Python" from + Micha Gorelick and Ian Ozsvald, O'Reilly publisher, 2014, + ISBN: 978-1-449-36159-4 + + @Input: + *args: undefined + A variable number of positional arguments. + + **kwargs: undefined + A variable number of key/value arguments. + + @Return: + <nothing> + ''' + + t1 = time.time() + result = fn(*args, **kwargs) + t2 = time.time() + print "@timefn:" + fn.func_name + " took " + str(t2 - t1) + " seconds" + + return result + + return measure_time diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/submit.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/submit.py new file mode 100755 index 0000000000000000000000000000000000000000..967ed945810218d72a7e3b53215c279ca40824d2 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/submit.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - job submission on ecgate and cca +# - job templates suitable for twice daily operational dissemination +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - minor changes in programming style (for consistence) +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program is the main program of flex_extract and controls the +# program flow. +# If it is supposed to work locally then it works through the necessary +# functions get_mars_data and prepareFlexpart. Otherwise it prepares +# a shell job script which will do the necessary work on the +# ECMWF server and is submitted via ecaccess-job-submit. 
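+#
+#    Example calls (a sketch; option names are those defined in
+#    tools.get_cmdline_arguments):
+#        python submit.py --controlfile=CONTROL.temp                 # local run
+#        python submit.py --controlfile=CONTROL.temp --queue=ecgate  # job on ECMWF server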
+# +# @Program Content: +# - main +# - submit +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import subprocess +import inspect +import collections + +# software specific classes and modules from flex_extract +import _config +from tools import normal_exit, get_cmdline_arguments, submit_job_to_ecserver, \ + read_ecenv +from get_mars_data import get_mars_data +from prepare_flexpart import prepare_flexpart +from ControlFile import ControlFile + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ + +def main(): + ''' + @Description: + Get the arguments from script call and from CONTROL file. + Decides from the argument "queue" if the local version + is done "queue=None" or the gateway version with "queue=ecgate" + or "queue=cca". + + @Input: + <nothing> + + @Return: + <nothing> + ''' + + called_from_dir = os.getcwd() + + args = get_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(_config.PATH_LOCAL_PYTHON + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + c.assign_args_to_control(args) + c.assign_envs_to_control(env_parameter) + c.check_conditions() + + # on local side + # on ECMWF server this would also be the local side + if args.queue is None: + if c.inputdir[0] != '/': + c.inputdir = os.path.join(called_from_dir, c.inputdir) + if c.outputdir[0] != '/': + c.outputdir = os.path.join(called_from_dir, c.outputdir) + get_mars_data(c) + if c.request == 0 or c.request == 2: + prepare_flexpart(args.ppid, c) + normal_exit(c.mailfail, 'FLEX_EXTRACT IS DONE!') + else: + normal_exit(c.mailfail, 'PRINTING MARS_REQUESTS DONE!') + # on ECMWF server + else: + submit(args.job_template, c, args.queue) + + return + +def submit(jtemplate, c, queue): + ''' + @Description: + Prepares the job script and submit it to the specified queue. + + @Input: + jtemplate: string + Job template file for submission to ECMWF. It contains all necessary + module and variable settings for the ECMWF environment as well as + the job call and mail report instructions. + Default is "job.temp". + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + queue: string + Name of queue for submission to ECMWF (e.g. 
ecgate or cca ) + + @Return: + <nothing> + ''' + + # read template file and get index for CONTROL input + with open(jtemplate) as f: + lftext = f.read().split('\n') + insert_point = lftext.index('EOF') + + if not c.basetime: + # --------- create on demand job script ------------------------------------ + if c.maxstep > 24: + print '---- Pure forecast mode! ----' + else: + print '---- On-demand mode! ----' + job_file = jtemplate[:-4] + 'ksh' + clist = c.to_list() + + lftextondemand = lftext[:insert_point] + clist + lftext[insert_point:] + + with open(job_file, 'w') as f: + f.write('\n'.join(lftextondemand)) + + result_code = submit_job_to_ecserver(queue, job_file) + + else: + # --------- create operational job script ---------------------------------- + print '---- Operational mode! ----' + job_file = jtemplate[:-5] + 'oper.ksh' + #colist = [] + + if c.maxstep: + mt = int(c.maxstep) + else: + mt = 0 + + c.start_date = '${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY}' + c.end_date = '${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY}' + c.base_time = '${MSJ_BASETIME}' + if mt > 24: + c.time = '${MSJ_BASETIME} {MSJ_BASETIME}' + + colist = c.to_list() + + lftextoper = lftext[:insert_point] + colist + lftext[insert_point + 2:] + + with open(job_file, 'w') as f: + f.write('\n'.join(lftextoper)) + + result_code = submit_job_to_ecserver(queue, job_file) + + # -------------------------------------------------------------------------- + print 'You should get an email with subject flex.hostname.pid' + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/test_suite.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/test_suite.py new file mode 100755 index 0000000000000000000000000000000000000000..6cd9ed7cfb41cd7faf5b1f5487524535216a889d --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/test_suite.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: December 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This script triggers the flex_extract test suite. Call with +# test_suite.py [test group] +# +# @Program Content: +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import json +import subprocess + +# ------------------------------------------------------------------------------ +# PROGRAM +# ------------------------------------------------------------------------------ +try: + taskfile = open('test_suite.json') +except IOError: + print 'could not open suite definition file test_suite.json' + exit() + +if not os.path.isfile('../src/CONVERT2'): + print '../src/CONVERT2 could not be found' + print 'please run "install.py --target=local" first' + exit() + +fprs = os.getenv('FLEXPART_ROOT_SCRIPTS') +if fprs is None: + print 'FLEXPART_ROOT_SCRIPTS not set .. 
some test jobs may fail' + +tasks = json.load(taskfile, encoding='latin-1') +taskfile.close() +if not os.path.exists('../test'): + os.makedirs('../test') +if len(sys.argv) > 1: + groups = sys.argv[1:] +else: + groups = ['xinstall', 'default', 'ops', 'work', 'cv', 'fc']#,'hires'] +jobcounter = 0 +jobfailed = 0 +for g in groups: + try: + tk, tv = g, tasks[g] + finally: + pass + garglist = [] + for ttk, ttv in tv.iteritems(): + if isinstance(ttv, basestring): + if ttk != 'script': + garglist.append('--' + ttk) + if ttv[0] == '$': + garglist.append(os.path.expandvars(ttv)) + else: + garglist.append(ttv) + for ttk, ttv in tv.iteritems(): + if isinstance(ttv, dict): + arglist = [] + for tttk, tttv in ttv.iteritems(): + if isinstance(tttv, basestring): + arglist.append('--' + tttk) + if '$' in tttv[0]: + arglist.append(os.path.expandvars(tttv)) + else: + arglist.append(tttv) + print 'Command: ', ' '.join([tv['script']] + garglist + arglist) + o = '../test/' + tk + '_' + ttk + '_' + '_'.join(ttv.keys()) + print 'Output will be sent to ', o + f = open(o, 'w') + try: + p = subprocess.check_call([tv['script']] + garglist + arglist, + stdout=f, stderr=f) + except subprocess.CalledProcessError as e: + f.write('\nFAILED\n') + print 'FAILED' + jobfailed += 1 + jobcounter += 1 + f.close() + +print 'Test suite tasks completed' +print str(jobcounter-jobfailed) + ' successful, ' + str(jobfailed) + ' failed' +print 'If tasks have been submitted via ECACCESS please check emails' diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/python/tools.py b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..8e1e405d3730febfcb3e473222fe332b0e104edd --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/python/tools.py @@ -0,0 +1,531 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: May 2018 +# +# @Change History: +# October 2014 - Anne Fouilloux (University of Oslo) +# - created functions silent_remove and product (taken from ECMWF) +# +# November 2015 - Leopold Haimberger (University of Vienna) +# - created functions: interpret_args_and_control, clean_up +# my_error, normal_exit, init128, to_param_id +# +# April 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - moved all functions from file Flexparttools to this file tools +# - added function get_list_as_string +# - seperated args and control interpretation +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Modul Description: +# This module contains a couple of helpful functions which are +# used in different places in flex_extract. 
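+#
+#    Example (a sketch; assumes an ECMWF_ENV file as written by install.py):
+#        envs = read_ecenv('ECMWF_ENV')
+#        # envs is then a dictionary such as
+#        # {'ECUID': '<uid>', 'ECGID': '<gid>',
+#        #  'GATEWAY': '<gateway>', 'DESTINATION': '<uid>@genericSftp'}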
+# +# @Module Content: +# - get_cmdline_arguments +# - clean_up +# - my_error +# - normal_exit +# - product +# - silent_remove +# - init128 +# - to_param_id +# - get_list_as_string +# - make_dir +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import errno +import sys +import glob +import subprocess +import traceback +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ + +def get_cmdline_arguments(): + ''' + @Description: + Decomposes the command line arguments and assigns them to variables. + Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + ''' + + parser = ArgumentParser(description='Retrieve FLEXPART input from \ + ECMWF MARS archive', + formatter_class=ArgumentDefaultsHelpFormatter) + + # the most important arguments + parser.add_argument("--start_date", dest="start_date", default=None, + help="start date YYYYMMDD") + parser.add_argument("--end_date", dest="end_date", default=None, + help="end_date YYYYMMDD") + parser.add_argument("--date_chunk", dest="date_chunk", default=None, + help="# of days to be retrieved at once") + + # some arguments that override the default in the CONTROL file + parser.add_argument("--basetime", dest="basetime", default=None, + help="base such as 00/12 (for half day retrievals)") + parser.add_argument("--step", dest="step", default=None, + help="steps such as 00/to/48") + parser.add_argument("--levelist", dest="levelist", default=None, + help="Vertical levels to be retrieved, e.g. 30/to/60") + parser.add_argument("--area", dest="area", default=None, + help="area defined as north/west/south/east") + + # set the working directories + parser.add_argument("--inputdir", dest="inputdir", default=None, + help="root directory for storing intermediate files") + parser.add_argument("--outputdir", dest="outputdir", default=None, + help="root directory for storing output files") + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + default=None, + help="FLEXPART root directory (to find grib2flexpart \ + and COMMAND file)\n Normally flex_extract resides in \ + the scripts directory of the FLEXPART distribution") + + # this is only used by prepare_flexpart.py to rerun a postprocessing step + parser.add_argument("--ppid", dest="ppid", default=None, + help="specify parent process id for \ + rerun of prepare_flexpart") + + # arguments for job submission to ECMWF, only needed by submit.py + parser.add_argument("--job_template", dest='job_template', + default="job.temp", + help="job template file for submission to ECMWF") + parser.add_argument("--queue", dest="queue", default=None, + help="queue for submission to ECMWF \ + (e.g. 
ecgate or cca )") + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + parser.add_argument("--debug", dest="debug", default=None, + help="debug mode - leave temporary files intact") + parser.add_argument("--request", dest="request", default=None, + help="list all mars request in file mars_requests.dat \ + and skip submission to mars") + + args = parser.parse_args() + + return args + +def read_ecenv(filename): + ''' + @Description: + Reads the file into a dictionary where the key values are the parameter + names. + + @Input: + filename: string + Name of file where the ECMWV environment parameters are stored. + + @Return: + envs: dict + ''' + envs= {} + print filename + with open(filename, 'r') as f: + for line in f: + data = line.strip().split() + envs[str(data[0])] = str(data[1]) + + return envs + +def clean_up(c): + ''' + @Description: + Remove all files from intermediate directory + (inputdir from CONTROL file). + + @Input: + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + print "clean_up" + + cleanlist = glob.glob(c.inputdir + "/*") + for clist in cleanlist: + if c.prefix not in clist: + silent_remove(clist) + if c.ecapi is False and (c.ectrans == '1' or c.ecstorage == '1'): + silent_remove(clist) + + print "Done" + + return + + +def my_error(users, message='ERROR'): + ''' + @Description: + Prints a specified error message which can be passed to the function + before exiting the program. + + @Input: + user: list of strings + Contains all email addresses which should be notified. + It might also contain just the ecmwf user name which wil trigger + mailing to the associated email address for this user. + + message: string, optional + Error message. Default value is "ERROR". + + @Return: + <nothing> + ''' + + print message + + # comment if user does not want email notification directly from python + for user in users: + if '${USER}' in user: + user = os.getenv('USER') + try: + p = subprocess.Popen(['mail', '-s flex_extract_v7.1 ERROR', + os.path.expandvars(user)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + trace = '\n'.join(traceback.format_stack()) + pout = p.communicate(input=message + '\n\n' + trace)[0] + except ValueError as e: + print 'ERROR: ', e + sys.exit('Email could not be sent!') + else: + print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ + pout.decode() + + sys.exit(1) + + return + + +def normal_exit(users, message='Done!'): + ''' + @Description: + Prints a specific exit message which can be passed to the function. + + @Input: + user: list of strings + Contains all email addresses which should be notified. + It might also contain just the ecmwf user name which wil trigger + mailing to the associated email address for this user. + + message: string, optional + Message for exiting program. Default value is "Done!". 
+ + @Return: + <nothing> + + ''' + print message + + # comment if user does not want notification directly from python + for user in users: + if '${USER}' in user: + user = os.getenv('USER') + try: + p = subprocess.Popen(['mail', '-s flex_extract_v7.1 normal exit', + os.path.expandvars(user)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + pout = p.communicate(input=message+'\n\n')[0] + except ValueError as e: + print 'ERROR: ', e + print 'Email could not be sent!' + else: + print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ + pout.decode() + + return + + +def product(*args, **kwds): + ''' + @Description: + This method is taken from an example at the ECMWF wiki website. + https://software.ecmwf.int/wiki/display/GRIB/index.py; 2018-03-16 + + This method combines the single characters of the passed arguments + with each other. So that each character of each argument value + will be combined with each character of the other arguments as a tuple. + + Example: + product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy + product(range(2), repeat = 3) --> 000 001 010 011 100 101 110 111 + + @Input: + *args: tuple + Positional arguments (arbitrary number). + + **kwds: dictionary + Contains all the keyword arguments from *args. + + @Return: + prod: tuple + Return will be done with "yield". A tuple of combined arguments. + See example in description above. + ''' + pools = map(tuple, args) * kwds.get('repeat', 1) + result = [[]] + for pool in pools: + result = [x + [y] for x in result for y in pool] + for prod in result: + yield tuple(prod) + + return + + +def silent_remove(filename): + ''' + @Description: + Remove file if it exists. + The function does not fail if the file does not exist. + + @Input: + filename: string + The name of the file to be removed without notification. + + @Return: + <nothing> + ''' + try: + os.remove(filename) + except OSError as e: + if e.errno != errno.ENOENT: + # errno.ENOENT = no such file or directory + raise # re-raise exception if a different error occured + + return + + +def init128(filepath): + ''' + @Description: + Opens and reads the grib file with table 128 information. + + @Input: + filepath: string + Path to file of ECMWF grib table number 128. + + @Return: + table128: dictionary + Contains the ECMWF grib table 128 information. + The key is the parameter number and the value is the + short name of the parameter. + ''' + table128 = dict() + with open(filepath) as f: + fdata = f.read().split('\n') + for data in fdata: + if data[0] != '!': + table128[data[0:3]] = data[59:64].strip() + + return table128 + + +def to_param_id(pars, table): + ''' + @Description: + Transform parameter names to parameter ids + with ECMWF grib table 128. + + @Input: + pars: string + Addpar argument from CONTROL file in the format of + parameter names instead of ids. The parameter short + names are sepearted with "/" and they are passed as + one single string. + + table: dictionary + Contains the ECMWF grib table 128 information. + The key is the parameter number and the value is the + short name of the parameter. + + @Return: + ipar: list of integer + List of addpar parameters from CONTROL file transformed to + parameter ids in the format of integer. 
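A tiny worked example of the translation described above (a sketch; the table path is a placeholder and must point to an ECMWF grib table 128 text file in the fixed layout that init128 expects, with the listed parameters present):

# map parameter numbers or short names from a CONTROL-style string to integer ids
from tools import init128, to_param_id

table128 = init128('/path/to/ECMWF_grib_table_128')   # placeholder path
print(to_param_id('130/133/152', table128))           # numeric entries match the table keys -> [130, 133, 152]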
+ ''' + cpar = pars.upper().split('/') + ipar = [] + for par in cpar: + for k, v in table.iteritems(): + if par == k or par == v: + ipar.append(int(k)) + break + else: + print 'Warning: par ' + par + ' not found in table 128' + + return ipar + +def get_list_as_string(list_obj, concatenate_sign=', '): + ''' + @Description: + Converts a list of arbitrary content into a single string. + + @Input: + list_obj: list + A list with arbitrary content. + + concatenate_sign: string, optional + A string which is used to concatenate the single + list elements. Default value is ", ". + + @Return: + str_of_list: string + The content of the list as a single string. + ''' + + str_of_list = concatenate_sign.join(str(l) for l in list_obj) + + return str_of_list + +def make_dir(directory): + ''' + @Description: + Creates a directory and gives a warning if the directory + already exists. The program stops only if there is another problem. + + @Input: + directory: string + The directory name including the path which should be created. + + @Return: + <nothing> + ''' + try: + os.makedirs(directory) + except OSError as e: + if e.errno != errno.EEXIST: + # errno.EEXIST = directory already exists + raise # re-raise exception if a different error occured + else: + print 'WARNING: Directory {0} already exists!'.format(directory) + + return + +def put_file_to_ecserver(ecd, filename, target, ecuid, ecgid): + ''' + @Description: + Uses the ecaccess-file-put command to send a file to the ECMWF servers. + + NOTE: + The return value is just for testing reasons. It does not have + to be used from the calling function since the whole error handling + is done in here. + + @Input: + ecd: string + The path were the file is stored. + + filename: string + The name of the file to send to the ECMWF server. + + target: string + The target queue where the file should be sent to. + + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + @Return: + rcode: string + Resulting code of command execution. If successful the string + will be empty. + ''' + + try: + rcode = subprocess.check_output(['ecaccess-file-put', + ecd + '/' + filename, + target + ':/home/ms/' + + ecgid + '/' + ecuid + + '/' + filename], + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + print('\n... Do you have a valid ecaccess certification key?') + sys.exit('... ECACCESS-FILE-PUT FAILED!') + + return rcode + +def submit_job_to_ecserver(target, jobname): + ''' + @Description: + Uses ecaccess-job-submit command to submit a job to the ECMWF server. + + NOTE: + The return value is just for testing reasons. It does not have + to be used from the calling function since the whole error handling + is done in here. + + @Input: + target: string + The target where the file should be sent to, e.g. the queue. + + jobname: string + The name of the jobfile to be submitted to the ECMWF server. + + @Return: + rcode: string + Resulting code of command execution. If successful the string + will contain an integer number, representing the id of the job + at the ecmwf server. + ''' + + try: + rcode = subprocess.check_output(['ecaccess-job-submit', + '-queueName', target, + jobname]) + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + + print('\n... Do you have a valid ecaccess certification key?') + sys.exit('... 
ECACCESS-JOB-SUBMIT FAILED!') + + return rcode diff --git a/python/CONTROL.test b/python/pythontest/TestInstallTar/flex_extract_v7.1/run/control/CONTROL.test similarity index 100% rename from python/CONTROL.test rename to python/pythontest/TestInstallTar/flex_extract_v7.1/run/control/CONTROL.test diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.CRAY b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.CRAY new file mode 100644 index 0000000000000000000000000000000000000000..6ed57be95245136e040e65c5964a2ef4f93d48f9 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.CRAY @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/gcc-4.9.3/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +#EMOSLIB=-lemosR64 + +OPT = +DEBUG = -g +LIB = $(GRIB_API_LIBS) $(EMOSLIB) + +FC=ftn $(F90FLAGS) +F90C=ftn $(F90FLAGS) + +FFLAGS = $(OPT) -I. -r8 -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -r8 -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . + +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.gfortran b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.gfortran new file mode 100644 index 0000000000000000000000000000000000000000..58923fa5bf9713717ca12a4f420f9fb5ead4d6f0 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.gfortran @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/gcc-4.9.3/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +#EMOSLIB=-lemosR64 + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIB) $(EMOSLIB) + +FC=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian +F90C=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian + +FFLAGS = $(OPT) -I. 
-I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . + +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.ifort b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.ifort new file mode 100644 index 0000000000000000000000000000000000000000..038a1689708cdf956c1f3a89621d3cad58654414 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.ifort @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/ifort/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/ifort/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIBS) -lemosR64 -lgfortran + +FC=ifort -132 -traceback -r8 +F90C=ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.gfortran b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.gfortran new file mode 100644 index 0000000000000000000000000000000000000000..3847d57ad9e6c0b92d823eda01fb6f83e2221d33 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.gfortran @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib_api-1.14.3//include +GRIB_API_LIB= -L/usr/local/gcc-4.9.3/grib_api-1.14.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +EMOSLIB=-lemosR64 + +OPT = -g -O3 -fopenmp +DEBUG = -g +LIB = $(GRIB_API_LIB) $(EMOSLIB) + +FC=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian +F90C=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.ifort b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.ifort new file mode 100644 index 0000000000000000000000000000000000000000..6f58a3532815bb2766f1bcd5fb2677958eaae355 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.local.ifort @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/home/srvx1/tmc/TestEnv/Libraries/eccodes-2.6.0_ifort/include +GRIB_API_LIB= -L/home/srvx1/tmc/TestEnv/Libraries/eccodes-2.6.0_ifort/lib -Bstatic -leccodes_f90 -leccodes -Bdynamic -lm -ljasper + +OPT = -g -O3 -mcmodel=medium -unroll -inline -heap-arrays 32 +DEBUG = -g +LIB = $(GRIB_API_LIB) -lemosR64 -lgfortran + +FC=/opt/intel/bin/ifort -132 -traceback -r8 +F90C=/opt/intel/bin/ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
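The Makefile.* variants in this directory differ mainly in the compiler they call (gfortran, ifort or the Cray ftn wrapper) and in whether the grib_api paths are hard-coded (the ".local" files) or taken from the build environment. Building CONVERT2 by hand could look roughly like the sketch below; the paths are placeholders, and normally install.py --target=local drives this step instead:

# hypothetical manual build, run from the src/ directory
import subprocess
subprocess.check_call(
    ['make', '-f', 'Makefile.local.gfortran',
     'GRIB_API_INCLUDE_DIR=/opt/grib_api/include',                   # placeholder
     'GRIB_API_LIB=-L/opt/grib_api/lib -lgrib_api_f90 -lgrib_api'])  # placeholder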
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.new b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.new new file mode 100644 index 0000000000000000000000000000000000000000..9953d130c88dbb3a07e54867ef5c700c12df799f --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/Makefile.new @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/usr/local/ifort/grib1.12.3//include +GRIB_API_LIBS=-openmp -L/usr/local/ifort/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIBS) -lemosR64 + +FC=ifort -132 -traceback -r8 +F90C=ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . + +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/ftrafo.f b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/ftrafo.f new file mode 100644 index 0000000000000000000000000000000000000000..affdccdcf8b2a439fbd5fd35435ac076eefe7ee5 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/ftrafo.f @@ -0,0 +1,504 @@ + MODULE FTRAFO + + CONTAINS + + + +C +C Implementierung der spektralen Transformationsmethode unter Verwendung +C des reduzierten Gauss'schen Gitters +C +C Berechnung der scale winds aus Vorticity und Divergenz +C uebergibt man in XMN die Divergenz, so wird der divergente Anteil des +C Windes (XPHI=Ud,XPHI=Vd) zurueckgegeben, uebergibt man die Vorticity, so +C erhaelt man den rotationellen Wind (XLAM=Vrot,XPHI=-Urot). 
+C Summiert man beide, erhaelt man den gesamten Scale wind +C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte pro Flaeche +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE VDTOUV(XMN,XLAM,XPHI,GWSAVE,IFAX,P,MLAT,MNAUF,NI,NJ,NK) + + + USE PHTOGR + + IMPLICIT NONE + INTEGER J,N,NI,NJ,NK,MNAUF,GGIND(NJ/2) + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI + REAL RT,IT + + GGIND(1)=0 + DO 4 J = 2,NJ/2 + GGIND(J)=GGIND(J-1)+MLAT(J-1) +4 CONTINUE +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 5 J = 1,NJ/2 + CALL VDUVSUB(J,XMN,XLAM,XPHI,GWSAVE,IFAX,P,GGIND(J), + *MLAT,MNAUF,NI,NJ,NK) + 5 CONTINUE +!$OMP END PARALLEL DO + RETURN + END SUBROUTINE VDTOUV + + SUBROUTINE VDUVSUB(J,XMN,XLAM,XPHI,GWSAVE,IFAX,P, + *GGIND,MLAT,MNAUF,NI,NJ,NK) + + USE PHTOGR + + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL ERAD,SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI + REAL FAC(0:MNAUF),RT,IT + + + ERAD = 6367470.D0 + + FAC(0)=0.D0 + DO 12 N=1,MNAUF + FAC(N)=-ERAD/DBLE(N)/DBLE(N+1) +12 CONTINUE + + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MLAT(J)) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K)*FAC(N) + IT=XMN(2*LL+1,K)*FAC(N) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+ RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K)*FAC(N) + IT=XMN(2*LL+1,K)*FAC(N) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI) + UFOUC(2*M+1)=M*(SCR-ACR) + VFOUC(2*M)=-M*(SCI+ACI) + VFOUC(2*M+1)=M*(SCR+ACR) + + MUFOUC(2*M)=-(MUSCR-MUACR) + MUFOUC(2*M+1)=-(MUSCI-MUACI) + MVFOUC(2*M)=-(MUSCR+MUACR) + MVFOUC(2*M+1)=-(MUSCI+MUACI) + 2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(GGIND+1:GGIND+MLAT(J),K)=VFOUC(0:MLAT(J)-1) + CALL RFOURTR(UFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=UFOUC(0:MLAT(J)-1) + + CALL RFOURTR(MVFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(GGIND+1:GGIND+MLAT(J),K)=MVFOUC(0:MLAT(J)-1) + CALL RFOURTR(MUFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=MUFOUC(0:MLAT(J)-1) + +3 CONTINUE + + RETURN + END SUBROUTINE VDUVSUB + +C Berechnung des Gradienten eines Skalars aus dem Feld des +C Skalars XMN im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. 
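(English gloss of the German headers in this module: the routines transform fields between spectral space and the reduced Gaussian grid. VDTOUV returns the divergent wind when it is passed the divergence and the rotational wind when it is passed the vorticity; summing both gives the full wind. PHGRAD and PHGRACUT return the two horizontal gradient components XLAM and XPHI of a scalar whose spectral coefficients are given in XMN. GWSAVE is a work array for the FFT, P holds the associated Legendre polynomials and H their derivatives, MLAT the number of grid points per latitude circle, MNAUF the spectral truncation, NI the number of Gaussian grid points, NJ the number of Gaussian latitudes and NK the number of levels.)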
+C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE PHGRAD(XMN,XLAM,XPHI,GWSAVE,IFAX,P,H,MLAT, + *MNAUF,NI,NJ,NK) + + USE PHTOGR + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL ERAD + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI,RT,IT + + ERAD = 6367470.0 + + GGIND=0 + DO 4 J = 1,NJ/2 + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MLAT(J)) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI)/ERAD + UFOUC(2*M+1)=M*(SCR-ACR)/ERAD + VFOUC(2*M)=-M*(SCI+ACI)/ERAD + VFOUC(2*M+1)=M*(SCR+ACR)/ERAD + + MUFOUC(2*M)=-(MUSCR-MUACR)/ERAD + MUFOUC(2*M+1)=-(MUSCI-MUACI)/ERAD + MVFOUC(2*M)=-(MUSCR+MUACR)/ERAD + MVFOUC(2*M+1)=-(MUSCI+MUACI)/ERAD +2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(GGIND+1:GGIND+MLAT(J),K)=VFOUC(0:MLAT(J)-1) + CALL RFOURTR(UFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=UFOUC(0:MLAT(J)-1) + + CALL RFOURTR(MVFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(GGIND+1:GGIND+MLAT(J),K)=MVFOUC(0:MLAT(J)-1) + CALL RFOURTR(MUFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=MUFOUC(0:MLAT(J)-1) + +3 CONTINUE + GGIND=GGIND+MLAT(J) +4 CONTINUE + + + RETURN + END SUBROUTINE PHGRAD + +C Berechnung des Gradienten eines Skalars aus dem Feld des +C Skalars XMN im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. 
+C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE PHGRACUT(XMN,XLAM,XPHI,GWSAVE,IFAX,P,H,MAUF, + *MNAUF,NI,NJ,MANF,NK) + + USE PHTOGR + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MAUF,MANF,I,IFAX(10) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NJ,NK),XPHI(NI,NJ,NK) + REAL HLAM(MAXAUF,2),HPHI(MAXAUF,2) + REAL GWSAVE(4*MAUF+15) + REAL ERAD + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI,RT,IT + + ERAD = 6367470.0 + + GGIND=0 + DO 4 J = 1,NJ + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MAUF) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI)/ERAD + UFOUC(2*M+1)=M*(SCR-ACR)/ERAD + VFOUC(2*M)=-M*(SCI+ACI)/ERAD + VFOUC(2*M+1)=M*(SCR+ACR)/ERAD + + MUFOUC(2*M)=-(MUSCR-MUACR)/ERAD + MUFOUC(2*M+1)=-(MUSCI-MUACI)/ERAD + MVFOUC(2*M)=-(MUSCR+MUACR)/ERAD + MVFOUC(2*M+1)=-(MUSCI+MUACI)/ERAD +2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE,IFAX,MNAUF,MAUF,1) + + CALL RFOURTR(MVFOUC, + *GWSAVE,IFAX,MNAUF,MAUF,1) + + DO 6 I=0,NI-1 + IF(MANF+I.LE. MAUF) THEN + XLAM(I+1,J,K)=VFOUC(MANF+I-1) + XPHI(I+1,J,K)=MVFOUC(MANF+I-1) + ELSE + XLAM(I+1,J,K)=VFOUC(MANF-MAUF+I-1) + XPHI(I+1,J,K)=MVFOUC(MANF-MAUF+I-1) + ENDIF + 6 CONTINUE +3 CONTINUE + GGIND=GGIND+MAUF +4 CONTINUE + + RETURN + END SUBROUTINE PHGRACUT + +C Berechnung der Divergenz aus dem Windfeld (U,V) +C im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. +C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus +C Beachte, dass das Windfeld eine um 1 erhoehte Aufloesung in mu-Richtung hat. + + SUBROUTINE CONTGL(PS,DPSDL,DPSDM,DIV,U,V,BREITE,ETA, + *MLAT,A,B,NI,NJ,NK) + + IMPLICIT NONE + + INTEGER NI,NJ,NK,I,J,K,MLAT(NJ),L + + REAL A(NK+1),B(NK+1) + REAL PS(NI),DPSDL(NI),DPSDM(NI) + REAL DIV(NI,NK),U(NI,NK),V(NI,NK),ETA(NI,NK) + REAL BREITE(NJ) + + REAL DIVT1,DIVT2,POB,PUN,DPSDT,COSB + + L=0 + DO 4 J=1,NJ + COSB=(1.0-BREITE(J)*BREITE(J)) + DO 3 I=1,MLAT(J) + L=L+1 + DIVT1=0.0 + DIVT2=0.0 + DO 1 K=1,NK + POB=A(K)+B(K)*PS(L) + PUN=A(K+1)+B(K+1)*PS(L) + + DIVT1=DIVT1+DIV(L,K)*(PUN-POB) + if(cosb .gt. 0.) 
then + DIVT2=DIVT2+(B(K+1)-B(K))*PS(L)* + *(U(L,K)*DPSDL(L)+V(L,K)*DPSDM(L))/COSB + endif + + ETA(L,K)=-DIVT1-DIVT2 +1 CONTINUE + + DPSDT=(-DIVT1-DIVT2)/PS(L) + + DO 2 K=1,NK + ETA(L,K)=ETA(L,K)-DPSDT*B(K+1)*PS(L) +2 CONTINUE + PS(L)=DPSDT*PS(L) +3 CONTINUE +4 CONTINUE + RETURN + END SUBROUTINE CONTGL + +C OMEGA berechnet omega im Hybridkoordinatensystem +C PS ist der Bodendruck, +C DPSDL,DPSDM sind die Komponenten des Gradienten des Logarithmus des +C Bodendrucks +C DIV,U,V sind die horizontale Divergenz und das horizontale Windfeld +C BREITE ist das Feld der Gauss'schen Breiten +C E ist omega, + + SUBROUTINE OMEGA(PS,DPSDL,DPSDM,DIV,U,V,BREITE,E,MLAT,A,B,NGI + * ,NGJ,MKK) + + IMPLICIT NONE + + INTEGER I,J,K,L,NGI,NGJ,MKK,MLAT(NGJ) + + REAL PS(NGI),DPSDL(NGI),DPSDM(NGI),A(MKK+1),B(MKK+1) + REAL DIV(NGI,MKK),U(NGI,MKK),V(NGI,MKK),E(NGI,MKK) + REAL BREITE(NGJ) + + REAL DIVT1,DIVT2,POB,PUN,DP,X,Y,COSB + REAL DIVT3(MKK+2) + + L=0 + DO 4 J=1,NGJ + COSB=(1.0-BREITE(J)*BREITE(J)) + DO 3 I=1,MLAT(J) + L=L+1 + DIVT1=0.0 + DIVT2=0.0 + DIVT3(1)=0.0 + DO 1 K=1,MKK + POB=A(K)+B(K)*PS(L) + PUN=A(K+1)+B(K+1)*PS(L) + DP=PUN-POB + + Y=PS(L)*(U(L,K)*DPSDL(L)+V(L,K)*DPSDM(L))/COSB + IF(K.LT.3) THEN + X=0.0 + ELSE + X=(B(K+1)-B(K))*Y + ENDIF + + DIVT1=DIVT1+DIV(L,K)*DP + DIVT2=DIVT2+X + + DIVT3(K+1)=-DIVT1-DIVT2 + + IF(K.GT.1) THEN + E(L,K) = 0.5*(POB+PUN)/DP*Y* + *((B(K+1)-B(K))+(A(K+1)*B(K)-A(K)*B(K+1))/ + *DP*LOG(PUN/POB)) + ELSE + E(L,K) = 0.0 + ENDIF + + E(L,K) = E(L,K)+0.5*(DIVT3(K)+DIVT3(K+1)) + +1 CONTINUE +3 CONTINUE +4 CONTINUE + RETURN + END SUBROUTINE OMEGA + + END MODULE FTRAFO diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/grphreal.f b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/grphreal.f new file mode 100644 index 0000000000000000000000000000000000000000..dae342bf336d149d97ffa55234c0b1375eff2661 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/grphreal.f @@ -0,0 +1,188 @@ + MODULE GRTOPH + + USE PHTOGR + + CONTAINS +C + SUBROUTINE GRPH213(CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C CXM = FOURIERKOEFFIZIENTEN - nur ein Hilfsfeld +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + + +C Anzahl der Gitterpunkte pro Breitenkreis des reduzierten +C Gauss'schen Gitters + INTEGER MLAT(MAXB),ISIZE,IFAX(10,MAXB) + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL*8 Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + +C LOGICAL*1 USED(((216*217)/2+1)*160) + + DIMENSION CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + DIMENSION WSAVE(8*MAXB+15,MAXB/2) + REAL*8 W(MAXB) + DIMENSION IND(MAXB) + + + IND(1)=0 + DO 6 J=2,MAXB/2 + IND(j)=IND(J-1)+MLAT(J-1) + 6 CONTINUE +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 16 L=1,MLEVEL + CALL GRPHSUB(L,IND,CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) +16 CONTINUE +!$omp end parallel do + + + RETURN + END SUBROUTINE GRPH213 +C + SUBROUTINE GRPHSUB(L,IND,CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE 
TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C CXM = FOURIERKOEFFIZIENTEN - nur ein Hilfsfeld +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + REAL CXMS(4*(MNAUF+1)) + REAL CXMA(4*(MNAUF+1)) + REAL,ALLOCATABLE :: CXM(:,:) + +C Anzahl der Gitterpunkte pro Breitenkreis des reduzierten +C Gauss'schen Gitters + INTEGER MLAT(MAXB),ISIZE + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + +C LOGICAL*1 USED(((216*217)/2+1)*160) + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + INTEGER IFAX(10,MAXB) + REAL W(MAXB) + INTEGER IND(MAXB) + + ALLOCATE(CXM( 4*MAXB,MAXB)) + DO 5 J=1,MAXB/2 + CXMS(1:MLAT(J))=FELD(IND(J)+1:IND(J)+MLAT(J),L) + CALL RFOUFTR(CXMS,WSAVE(1,J),IFAX(:,J),MNAUF,MLAT(J),1) + CXMA(1:MLAT(J))=FELD(MAXL-IND(J)-MLAT(J)+1:MAXL-IND(J),L) + CALL RFOUFTR(CXMA, + *WSAVE(1,J),IFAX(:,J),MNAUF,MLAT(J),1) + DO 4 I=1,2*(MNAUF+1) + CXM(I,J)=CXMS(I)+CXMA(I) + CXM(I,MAXB+1-J)=CXMS(I)-CXMA(I) +4 CONTINUE + 5 CONTINUE + CALL LGTR213(CXMN(0,L),CXM,Z,W,MLAT,MNAUF,MAXB) + + DEALLOCATE(CXM) + + RETURN + END SUBROUTINE GRPHSUB +C + SUBROUTINE LGTR213(CXMN,CXM,Z,W,MLAT,MNAUF,MAXB) + IMPLICIT REAL (A-H,O-Z) + INTEGER MLAT(MAXB) + DIMENSION CXM(0:4*MAXB-1,MAXB) + DIMENSION CXMN(0:2*(((MNAUF+1)*MNAUF)/2+MNAUF)+1) + REAL*8 Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + REAL*8 W(MAXB),CR,CI,HILF + LOGICAL EVEN +C +C DIESE ROUTINE BERECHNET DIE KFFKs CXMN +C + LL=0 + LLP=0 + DO 1 I=0,MNAUF + KM=0 + 9 KM=KM+1 + IF(MLAT(KM).LE.2*I) THEN + GOTO 9 + ENDIF + DO 2 J=I,MNAUF + CR=0 + CI=0 + EVEN=MOD(I+J,2).EQ.0 + IF(EVEN) THEN + DO 3 K=KM,MAXB/2 + HILF=W(K)*Z(K,LLP) + CR=CR+CXM(2*I,K)*HILF + CI=CI+CXM(2*I+1,K)*HILF + 3 CONTINUE + ELSE + DO 4 K=KM,MAXB/2 + HILF=W(K)*Z(K,LLP) + CR=CR+CXM(2*I,MAXB+1-K)*HILF + CI=CI+CXM(2*I+1,MAXB+1-K)*HILF + 4 CONTINUE + ENDIF + 5 CXMN(2*LL)=CR + CXMN(2*LL+1)=CI + LL=LL+1 + LLP=LLP+1 + 2 CONTINUE + LLP=LLP+2 + 1 CONTINUE + RETURN + END SUBROUTINE LGTR213 +C + +C + SUBROUTINE RFOUFTR(CXM,TRIGS,IFAX,MNAUF,MAXL,ISIGN) +C BERECHNET DIE FOURIERSUMME MIT EINEM FFT-ALGORITHMUS + IMPLICIT REAL (A-H,O-Z) + DIMENSION CXM(0:2*MAXL-1) + DIMENSION FELD(MAXL),TRIGS(2*MAXL) + DIMENSION WSAVE(MAXAUF) + INTEGER IFAX(10) + + +C NORMIERUNG... + WSAVE(1)=CXM(MAXL-1) + + CXM(1:MAXL)=CXM(0:MAXL-1)/2 + CXM(0)=WSAVE(1)/2 +! 
CALL CFFTF(MAXL,CXM,WSAVE) + CALL FFT99(CXM,WSAVE,TRIGS,IFAX,1,1,MAXL,1,-1) + RETURN + END SUBROUTINE RFOUFTR + + END MODULE GRTOPH diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/jparams.h b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/jparams.h new file mode 100644 index 0000000000000000000000000000000000000000..146a7f05c10fdeaadb611686c4c95f7fdc2f7a28 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/jparams.h @@ -0,0 +1,34 @@ +C +C Parameters +C + INTEGER JP32, JPLONO, J2NFFT, JPFFT, JPLOOK, JPMAX, JPMAXITER + INTEGER JPMXTRY, JPTRNC, JPK, JPTRP1 + PARAMETER ( JP32 = 32 ) +C +C The following value for JPLONO (2560) will handle regular grids +C from N1 to N720 derived from spectral truncations from T1 to +C T639. +C +Cjdc PARAMETER ( JPLONO = 2560 , J2NFFT = 2 + JPLONO, JPFFT = 12000) + PARAMETER ( JPLONO = 6000 , J2NFFT = 2 + JPLONO, JPFFT = 12000) + PARAMETER ( JPLOOK = 50) + PARAMETER ( JPMAX = 2048 ) + PARAMETER ( JPMAXITER = 10) + PARAMETER ( JPMXTRY = 3 ) + PARAMETER ( JPTRNC = 2047, JPK = (JPTRNC + 1)*(JPTRNC + 4) ) + PARAMETER ( JPTRP1 = (JPTRNC + 1) ) +C + REAL PPEPSA, PPQUART, PPHALF, PPTWO, PP90 + PARAMETER ( PPEPSA = 1.0E-6) + PARAMETER ( PPQUART = 0.25E0) + PARAMETER ( PPHALF = 0.5E0) + PARAMETER ( PPTWO = 2.0E0) + PARAMETER ( PP90 = 90.0E0) +C + REAL PPI + PARAMETER ( PPI = 3.14159265358979 ) +C +C Debug parameters +C + INTEGER NDBG, NDBGLP + COMMON /JDCNDBG/ NDBG, NDBGLP diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/phgrreal.f b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/phgrreal.f new file mode 100644 index 0000000000000000000000000000000000000000..aa3658c917fd35d15dd4c2b9959f4fcf074923a1 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/phgrreal.f @@ -0,0 +1,553 @@ + MODULE PHTOGR + + INTEGER, PARAMETER :: MAXAUF=36000 + + CONTAINS + + SUBROUTINE PHGR213(CXMN,FELD,WSAVE,IFAX,Z,MLAT,MNAUF, + *MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF DAS REDUZIERTE GAUSS'SCHE GITTER DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT NONE + +C Anzahl der Gitterpunkte auf jedem Breitenkreis + INTEGER MLAT(MAXB/2) + INTEGER K,MAXL,MAXB,MLEVEL,MNAUF + INTEGER IND(MAXB) + + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB/2) + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + INTEGER :: IFAX(10,MAXB) + + IND(1)=0 + DO 7 K=2,MAXB/2 + IND(K)=IND(K-1)+MLAT(K-1) +7 CONTINUE + +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 17 K=1,MAXB/2 + CALL PHSYM(K,IND,CXMN,FELD,Z,WSAVE,IFAX,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +17 CONTINUE +!$OMP END PARALLEL DO + + RETURN + END SUBROUTINE PHGR213 +C +C + SUBROUTINE PHSYM(K,IND,CXMN,FELD,Z,WSAVE,IFAX,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + + IMPLICIT NONE + + INTEGER MLAT(MAXB/2) + INTEGER K,L,I,J,LLS,LLPS,LL,LLP,MAXL,MAXB,MLEVEL,MNAUF + INTEGER IND(MAXB) + INTEGER :: IFAX(10,MAXB) + + +C FELD DER FOURIERKOEFFIZIENTEN + REAL :: CXMS(0:MAXAUF-1),CXMA(0:MAXAUF-1) + +C FELD DER LEGENDREPOLYNOME 
FUER EINE BREITE + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB/2) + REAL ACR,ACI,SCR,SCI + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + + DO 6 L=1,MLEVEL + LL=0 + LLP=0 + DO 1 I=0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + LLS=LL + LLPS=LLP + IF(2*I+1.LT.MLAT(K)) THEN +C Innerste Schleife aufgespalten um if-Abfrage zu sparen + DO 18 J=I,MNAUF,2 + SCR=SCR+Z(LLP,K)*CXMN(2*LL,L) + SCI=SCI+Z(LLP,K)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +18 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + DO 19 J=I+1,MNAUF,2 + ACR=ACR+Z(LLP,K)*CXMN(2*LL,L) + ACI=ACI+Z(LLP,K)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +19 CONTINUE + ENDIF + LL=LLS+(MNAUF-I+1) + LLP=LLPS+(MNAUF-I+3) + CXMS(2*I)=SCR+ACR + CXMS(2*I+1)=SCI+ACI + CXMA(2*I)=SCR-ACR + CXMA(2*I+1)=SCI-ACI + 1 CONTINUE +C CALL FOURTR(CXMS,FELD(IND(k)+1,L),WSAVE(:,K),MNAUF, +C *MLAT(K),1) +C CALL FOURTR(CXMA,FELD(MAXL-IND(k)-MLAT(K)+1,L), +C *WSAVE(:,K),MNAUF,MLAT(K),1) + CALL RFOURTR(CXMS,WSAVE(:,K),IFAX(:,K),MNAUF, + *MLAT(K),1) + FELD(IND(k)+1:IND(K)+MLAT(K),L)=CXMS(0:MLAT(K)-1) + CALL RFOURTR(CXMA, + *WSAVE(:,K),IFAX(:,K),MNAUF,MLAT(K),1) + FELD(MAXL-IND(k)-MLAT(K)+1:MAXL-IND(k),L)=CXMA(0:MLAT(K)-1) +C WRITE(*,*) IND+1,FELD(IND+1,L) +6 CONTINUE + + END SUBROUTINE PHSYM + + SUBROUTINE PHGCUT(CXMN,FELD,WSAVE,IFAX,Z, + * MNAUF,MMAX,MAUF,MANF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE FUEHRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH. Es kann ein Teilausschnitt +C Der Erde angegeben werden. Diese Routine ist langsamer als +C phgrph +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C BREITE = SINUS DER GEOGRAFISCHEN BREITEN +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAUF ANZAHL DER LAENGEN UND DER FOURIERKOEFFIZIENTEN +C MANF ANFANG DES LAENGENBEREICHS FUER DAS GITTER, +C AUF DAS INTERPOLIERT WERDEN SOLL +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MMAX+3)*(MMAX+4))/2,MAXB) + + DIMENSION CXMN(0:(MMAX+1)*(MMAX+2)-1,MLEVEL) + REAL FELD(MAXL,MAXB,MLEVEL) + DIMENSION WSAVE(4*MAUF+15) + INTEGER:: IFAX(10) + + LOGICAL SYM + +C +C write(*,*)mauf,mnauf,manf,maxl + + + IF(MAUF.LE.MNAUF) WRITE(*,*) 'TOO COARSE LONGITUDE RESOLUTION' + IF((MANF.LT.1).OR.(MAXL.LT.1).OR. + * (MANF.GT.MAUF).OR.(MAXL.GT.MAUF)) THEN + WRITE(*,*) 'WRONG LONGITUDE RANGE',MANF,MAXL + STOP + ENDIF + +C Pruefe, ob Ausgabegitter symmetrisch zum Aequator ist +C Wenn ja soll Symmetrie der Legendrepolynome ausgenutzt werden + IF(MAXB .GT. 4) THEN + SYM=.TRUE. + DO 11 J=5,5 + IF(ABS(ABS(Z(100,J))-ABS(Z(100,MAXB+1-J))).GT.1E-11) + * SYM=.FALSE. +C WRITE(*,*) ABS(Z(100,J)),ABS(Z(100,MAXB+1-J)) +11 CONTINUE + WRITE(*,*) 'Symmetrisch: ',SYM + ELSE + SYM=.FALSE. 
+ ENDIF + + + IF(SYM) THEN +!$OMP PARALLEL DO + DO J=1,(MAXB+1)/2 + CALL PHSYMCUT(J,CXMN,FELD,Z,WSAVE,IFAX, + *MAUF,MNAUF,MAXL,MAXB,MLEVEL,MANF) + + ENDDO +!$OMP END PARALLEL DO + ELSE +!$OMP PARALLEL DO + DO J=1,MAXB + CALL PHGPNS(CXMN,FELD,Z,WSAVE,IFAX, + *J,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + ENDDO +!$OMP END PARALLEL DO + + ENDIF + + + RETURN + END SUBROUTINE PHGCUT + + SUBROUTINE PHSYMCUT(J,CXMN,FELD,Z,WSAVE,IFAX, + *MAUF,MNAUF,MAXL,MAXB,MLEVEL,MANF) + + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + + REAL :: CXM(0:MAXAUF-1),CXMA(0:MAXAUF-1) + + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB) + REAL SCR,SCI,ACR,ACI + + DIMENSION CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MAXB,MLEVEL) + DIMENSION WSAVE(4*MAUF+15) + INTEGER :: IFAX(10) + + DO 16 L=1,MLEVEL + LL=0 + LLP=0 + DO 17 I=0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + LLS=LL + LLPS=LLP +C Innerste Schleife aufgespalten um if-Abfrage zu sparen + DO 18 K=I,MNAUF,2 + SCR=SCR+Z(LLP,J)*CXMN(2*LL,L) + SCI=SCI+Z(LLP,J)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +18 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + DO 19 K=I+1,MNAUF,2 + ACR=ACR+Z(LLP,J)*CXMN(2*LL,L) + ACI=ACI +Z(LLP,J)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +19 CONTINUE + LL=LLS+MNAUF-I+1 + LLP=LLPS+MNAUF-I+3 + CXM(2*I)=SCR+ACR + CXM(2*I+1)=SCI+ACI + CXMA(2*I)=SCR-ACR + CXMA(2*I+1)=SCI-ACI +17 CONTINUE + + CALL RFOURTR(CXM,WSAVE,IFAX,MNAUF,MAUF,1) + DO 26 I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,J,L)=CXM(MANF+I-1) + ELSE + FELD(I+1,J,L)=CXM(MANF-MAUF+I-1) + ENDIF +26 CONTINUE + CALL RFOURTR(CXMA,WSAVE,IFAX,MNAUF,MAUF,1) + DO 36 I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,MAXB+1-J,L)=CXMA(MANF+I-1) + ELSE + FELD(I+1,MAXB+1-J,L)=CXMA(MANF-MAUF+I-1) + ENDIF +36 CONTINUE +16 CONTINUE + + END SUBROUTINE PHSYMCUT + + SUBROUTINE PHGPNS(CXMN,FELD,Z,WSAVE,IFAX, + *J,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + IMPLICIT NONE + INTEGER,intent(in) :: MNAUF,MAUF,MANF,J,MAXL,MAXB,MLEVEL + REAL :: CXM(0:MAXAUF-1) + REAL,intent(in) :: Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB) + + REAL,intent(in) :: CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + + REAL,intent(in) :: WSAVE(4*MAUF+15) + + REAL :: FELD(MAXL,MAXB,MLEVEL) + INTEGER :: IFAX(10) + + INTEGER I,L + + DO L=1,MLEVEL + CALL LEGTR(CXMN(:,L),CXM,Z(:,J),MNAUF,MAUF) + CALL RFOURTR(CXM,WSAVE,IFAX,MNAUF,MAUF,1) + + DO I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,J,L)=CXM(MANF+I-1) + ELSE + FELD(I+1,J,L)=CXM(MANF-MAUF+I-1) + ENDIF + ENDDO + ENDDO + END SUBROUTINE PHGPNS +C + SUBROUTINE LEGTR(CXMN,CXM,Z,MNAUF,MAUF) + IMPLICIT NONE + INTEGER MNAUF,MAUF,LL,LLP,I,J + REAL CXM(0:MAXAUF-1) + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1) + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2) + REAL CI,CR +C +C DIESE ROUTINE BERECHNET DIE FOURIERKOEFFIZIENTEN CXM +C + LL=0 + LLP=0 + DO 1 I=0,MNAUF + CR=0.D0 + CI=0.D0 + DO 2 J=I,MNAUF + CR=CR+Z(LLP)*CXMN(2*LL) + CI=CI+Z(LLP)*CXMN(2*LL+1) + LL=LL+1 + LLP=LLP+1 + 2 CONTINUE + LLP=LLP+2 + CXM(2*I)=CR + CXM(2*I+1)=CI + 1 CONTINUE + RETURN + END SUBROUTINE LEGTR +C +C +C + SUBROUTINE RFOURTR(CXM,TRIGS,IFAX,MNAUF,MAXL,ISIGN) +C BERECHNET DIE FOURIERSUMME MIT EINEM FFT-ALGORITHMUS + IMPLICIT REAL (A-H,O-Z) + DIMENSION CXM(0:MAXAUF-1) + REAL :: WSAVE(2*MAXL),TRIGS(2*MAXL) + INTEGER IFAX(10) + + DO I=MNAUF+1,MAXL-1 + CXM(2*I)=0.0 + CXM(2*I+1)=0.0 + ENDDO + CALL FFT99(CXM,WSAVE,TRIGS,IFAX,1,1,MAXL,1,1) + DO I=0,MAXL-1 + CXM(I)=CXM(I+1) + ENDDO + + RETURN + END SUBROUTINE RFOURTR +C +C + SUBROUTINE GAULEG(X1,X2,X,W,N) +C BERECHNET DIE GAUSS+SCHEN BREITEN + IMPLICIT REAL (A-H,O-Z) + DIMENSION 
X(N),W(N) + PARAMETER (EPS=3.D-14) + M=(N+1)/2 + XM=0.5D0*(X2+X1) + XL=0.5D0*(X2-X1) + DO 12 I=1,M + Z=DCOS(3.141592654D0*(I-.25D0)/(N+.5D0)) +1 CONTINUE + P1=1.D0 + P2=0.D0 + DO 11 J=1,N + P3=P2 + P2=P1 + P1=((2.D0*J-1.D0)*Z*P2-(J-1.D0)*P3)/J +11 CONTINUE + PP=N*(Z*P1-P2)/(Z*Z-1.D0) + Z1=Z + Z=Z1-P1/PP + IF(ABS(Z-Z1).GT.EPS)GO TO 1 + X(I)=XM-XL*Z + X(N+1-I)=XM+XL*Z + W(I)=2.D0*XL/((1.D0-Z*Z)*PP*PP) + W(N+1-I)=W(I) +12 CONTINUE + RETURN + END SUBROUTINE GAULEG +C +C + SUBROUTINE PLGNFA(LL,X,Z) +C +C PLGNDN BERECHNET ALLE NORMIERTEN ASSOZIIERTEN +C LEGENDREFUNKTIONEN VON P00(X) BIS PLL(X) +C UND SCHREIBT SIE IN DAS FELD Z +C Die Polynome sind wie im ECMWF indiziert, d.h. +C P00,P10,P11,P20,P21,P22,... +C Ansonsten ist die Routine analog zu PLGNDN +C X IST DER COSINUS DES ZENITWINKELS ODER +C DER SINUS DER GEOGRAFISCHEN BREITE +C + IMPLICIT REAL (A-H,O-Z) + DIMENSION Z(0:((LL+3)*(LL+4))/2) +C + L=LL+2 + I=1 + Z(0)=1.D0 + FACT=1.D0 + POT=1.D0 + SOMX2=DSQRT(1.D0-X*X) + DO 14 J=0,L + DJ=DBLE(J) + IF(J.GT.0) THEN + FACT=FACT*(2.D0*DJ-1.D0)/(2.D0*DJ) + POT=POT*SOMX2 + Z(I)=DSQRT((2.D0*DJ+1.D0)*FACT)*POT + I=I+1 + ENDIF + IF(J.LT.L) THEN + Z(I)=X* + *DSQRT((4.D0*DJ*DJ+8.D0*DJ+3.D0)/(2.D0*DJ+1.D0))*Z(I-1) + I=I+1 + ENDIF + DK=DJ+2.D0 + DO 14 K=J+2,L + DDK=(DK*DK-DJ*DJ) + Z(I)=X*DSQRT((4.D0*DK*DK-1.D0)/DDK)*Z(I-1)- + * DSQRT(((2.D0*DK+1.D0)*(DK-DJ-1.D0)*(DK+DJ-1.D0))/ + * ((2.D0*DK-3.D0)*DDK))*Z(I-2) + DK=DK+1.D0 + I=I+1 +14 CONTINUE + RETURN + END SUBROUTINE PLGNFA + + + SUBROUTINE DPLGND(MNAUF,Z,DZ) +C +C DPLGND BERECHNET DIE ABLEITUNG DER NORMIERTEN ASSOZIIERTEN +C LEGENDREFUNKTIONEN VON P00(X) BIS PLL(X) +C UND SCHREIBT SIE IN DAS FELD DZ +C DIE REIHENFOLGE IST +C P00(X),P01(X),P11(X),P02(X),P12(X),P22(X),..PLL(X) +C + IMPLICIT REAL (A-H,O-Z) + DIMENSION Z(0:((MNAUF+3)*(MNAUF+4))/2) + DIMENSION DZ(0:((MNAUF+2)*(MNAUF+3))/2) +C + IF(Z(0).NE.1.D0) THEN + WRITE(*,*) 'DPLGND: Z(0) must be 1.0' + STOP + ENDIF + + LLP=0 + LLH=0 + DO 1 I=0,MNAUF+1 + DO 2 J=I,MNAUF+1 + IF(I.EQ.J) THEN + WURZELA= + *DSQRT(DBLE((J+1)*(J+1)-I*I)/DBLE(4*(J+1)*(J+1)-1)) + DZ(LLH)=DBLE(J)*WURZELA*Z(LLP+1) + ELSE + WURZELB= + *DSQRT(DBLE((J+1)*(J+1)-I*I)/DBLE(4*(J+1)*(J+1)-1)) + DZ(LLH)= + *DBLE(J)*WURZELB*Z(LLP+1)-DBLE(J+1)*WURZELA*Z(LLP-1) + WURZELA=WURZELB + ENDIF + LLH=LLH+1 + LLP=LLP+1 +2 CONTINUE + LLP=LLP+1 +1 CONTINUE + RETURN + END SUBROUTINE DPLGND + + +* Spectral Filter of Sardeshmukh and Hoskins (1984, MWR) +* MM=Spectral truncation of field +* MMAX= Spectral truncation of filter +* + SUBROUTINE SPFILTER(FELDMN,MM,MMAX) + + IMPLICIT NONE + + INTEGER MM,MMAX,I,J,K,L + REAL FELDMN(0:(MM+1)*(MM+2)-1) + REAL KMAX,SMAX,FAK + + SMAX=0.1 + KMAX=-ALOG(SMAX) + KMAX=KMAX/(float(MMAX)*float(MMAX+1))**2 +c WRITE(*,*)'alogsmax',alog(smax),'KMAX:',KMAX + l=0 + do i=0,MM + do j=i,MM +c write(*,*) i,j,feld(k),feld(k)*exp(-KMAX*(j*(j+1))**2) + if(j .le. MMAX) then +c fak=exp(-KMAX*(j*(j+1))**2) + fak=1.0 + feldmn(2*l)=feldmn(2*l)*fak + feldmn(2*l+1)=feldmn(2*l+1)*fak + else + feldmn(2*l)=0. + feldmn(2*l+1)=0. 
+ endif + l=l+1 + enddo + enddo + END SUBROUTINE SPFILTER + + END MODULE PHTOGR + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/posnam.f b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/posnam.f new file mode 100644 index 0000000000000000000000000000000000000000..c5d12d2b9928e581f67ef0c6388dd3e641693aed --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/posnam.f @@ -0,0 +1,25 @@ + SUBROUTINE POSNAM(KULNAM,CDNAML) +!------------------------------------- + +!--- position in namelist file. +! author: Mats Hamrud, ECMWF + + INTEGER, INTENT(IN) :: KULNAM + CHARACTER*(*), INTENT(IN) :: CDNAML + CHARACTER*120 CLINE + CHARACTER*1 CLTEST + REWIND(KULNAM) + ILEN=LEN(CDNAML) + 102 CONTINUE + CLINE=' ' + READ(KULNAM,'(A)') CLINE + IND1=INDEX(CLINE,'&'//CDNAML) + IF(IND1.EQ.0) GO TO 102 + CLTEST=CLINE(IND1+ILEN+1:IND1+ILEN+1) + IF((LGE(CLTEST,'0').AND.LLE(CLTEST,'9')).OR. + & (LGE(CLTEST,'A').AND.LLE(CLTEST,'Z'))) GO TO 102 + BACKSPACE(KULNAM) + + RETURN + END SUBROUTINE POSNAM + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/preconvert.f90 b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/preconvert.f90 new file mode 100644 index 0000000000000000000000000000000000000000..c28610f2aecb31a3f4c3860d7a482ae0fa85d23f --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/preconvert.f90 @@ -0,0 +1,807 @@ + PROGRAM PRECONVERT +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! PROGRAM PRECONVERT - PREPARES INPUT DATA FOR POP MODEL METEOR- ! +! OLOGICAL PREPROCESSOR ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! CALCULATION OF ETAPOINT ON A REGULAR LAMDA/PHI GRID AND WRITING ! +! U,V,ETAPOINT,T,PS,Q,SD,MSL,TCC,10U, 10V, 2T,2D,LSP,CP,SSHF,SSR, ! +! EWSS,NSSS TO AN OUTPUT FILE (GRIB 1 or 2 FORMAT). ! +! ! +! AUTHORS: L. HAIMBERGER, G. WOTAWA, 1994-04 ! +! adapted: A. BECK ! +! 2003-05-11 ! +! L. Haimberger 2006-12 V2.0 ! +! modified to handle arbitrary regular grids ! +! and T799 resolution data ! +! L. Haimberger 2010-03 V4.0 ! +! modified to grib edition 2 fields ! +! and T1279 resolution data ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! DESCRIPTION OF NEEDED INPUT: ! +! ! +! UNIT FILE PARAMETER(S) DATA REPRESENTATION ! +! ! +! 11 fort.11 T,U,V regular lamda phi grid ! +! 12 fort.12 D regular lamda phi grid ! +! 13 fort.13 LNSP fort.13 spherical harmonics ! +! 14 fort.14 SD,MSL,TCC,10U, ! +! 10V,2T,2D regular lamda phi grid ! +! 16 fort.16 LSP,CP,SSHF, ! +! SSR,EWSS,NSSS regular lamda phi grid ! +! 17 fort.17 Q regular lamda phi grid ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! DESCRIPTION OF OUTPUT: ! +! ! +! UNIT FILE PARAMETER(S) DATA REPRESENTATION ! +! ! +! 15 fort.15 U,V,ETA,T,PS, ! +! Q,SD,MSL,TCC, ! +! 10U,10V,2T,2D, regular lamda phi grid ! +! LSP,CP,SSHF, ! +! SSR,EWSS,NSSS ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! 
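The program below reads its runtime configuration from a Fortran namelist group NAMGEN on unit 4; POSNAM (above) merely positions the file at the '&NAMGEN' record before the READ. A minimal sketch of writing such a file from Python (all values are placeholders, not a recommended setup; unit 4 is assumed to map to a file called fort.4, the usual compiler default):

# hypothetical generator for the NAMGEN namelist read by preconvert
namgen = (
    "&NAMGEN\n"
    "  maxl=361, maxb=181,\n"
    "  mlevel=137, mlevelist='1/to/137', mnauf=1279, metapar=77,\n"
    "  rlo0=-180.0, rlo1=180.0, rla0=-90.0, rla1=90.0,\n"
    "  momega=0, momegadiff=0, mgauss=1, msmooth=0,\n"
    "  meta=1, metadiff=0, mdpdeta=1\n"
    "/\n")
with open('fort.4', 'w') as f:
    f.write(namgen)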
+ + USE PHTOGR + USE GRTOPH + USE FTRAFO + USE RWGRIB2 + USE GRIB_API + + IMPLICIT NONE + + REAL, ALLOCATABLE, DIMENSION (:,:) :: LNPS + REAL, ALLOCATABLE, DIMENSION (:,:) :: Z + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: T, UV , UV2 + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: QA,OM,OMR + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: DIV, ETA,ETAR + REAL, ALLOCATABLE, DIMENSION (:,:) :: DPSDL, DPSDM + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: PS,DPSDT + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: SURF,FLUX,OROLSM + REAL, ALLOCATABLE, DIMENSION (:) :: WSAVE,H,SINL,COSL,WSAVE2 + REAL, ALLOCATABLE, DIMENSION (:) :: BREITE, GBREITE,AK, BK,pv + +! Arrays for Gaussian grid calculations + + REAL :: X1,X2,RMS,MW,SIG,LAM + REAL,ALLOCATABLE :: CUA(:,:,:),CVA(:,:,:) + + REAL, ALLOCATABLE, DIMENSION (:,:) :: P,PP !,P2 + REAL, ALLOCATABLE, DIMENSION (:,:) :: XMN,HILFUV + REAL, ALLOCATABLE, DIMENSION (:) :: LNPMN,LNPMN2,LNPMN3 + REAL, ALLOCATABLE, DIMENSION (:) :: WEIGHT + REAL, ALLOCATABLE, DIMENSION (:,:) :: UGVG + REAL, ALLOCATABLE, DIMENSION (:,:) :: DG, ETAG + REAL, ALLOCATABLE, DIMENSION (:,:) :: GWSAVE + REAL, ALLOCATABLE, DIMENSION (:) :: PSG,HILF + +! end arrays for Gaussian grid calculations + + INTEGER, ALLOCATABLE, DIMENSION (:) :: MLAT,MPSURF,MPFLUX,MPORO,MPAR + INTEGER, ALLOCATABLE :: GIFAX(:,:) + + REAL PI,COSB,DAK,DBK,P00 + REAL URLAR8,JMIN1,LLLAR8,MAXBMIN1,PIR8,DCOSB + + INTEGER I,J,K,L,IERR,M,LTEST,MK,NGI,NGJ + INTEGER MFLUX,MSURF,MORO + INTEGER LUNIT,LUNIT2 + + INTEGER MAXL, MAXB, MLEVEL, LEVOUT,LEVMIN,LEVMAX + INTEGER MOMEGA,MOMEGADIFF,MGAUSS,MSMOOTH, MNAUF,META,METADIFF + INTEGER MDPDETA,METAPAR + REAL RLO0, RLO1, RLA0, RLA1 + CHARACTER*300 MLEVELIST + + INTEGER MAUF, MANF,IFAX(10) + + INTEGER IGRIB(1),iret,ogrib + + CHARACTER*80 FILENAME + + NAMELIST /NAMGEN/ & + MAXL, MAXB, & + MLEVEL,MLEVELIST,MNAUF,METAPAR, & + RLO0, RLO1, RLA0, RLA1, & + MOMEGA,MOMEGADIFF,MGAUSS,MSMOOTH,META,METADIFF,& + MDPDETA + + LTEST=1 + + call posnam (4,'NAMGEN') + read (4,NAMGEN) + + MAUF=INT(360.*(REAL(MAXL)-1.)/(RLO1-RLO0)+0.0001) +! PRINT*, MAUF + + MANF=INT(REAL(MAUF)/360.*(360.+RLO0)+1.0001) + IF(MANF .gt. MAUF) MANF=MANF-MAUF + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ALLOCATE VARIABLES ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + ALLOCATE (LNPS(0:(MNAUF+1)*(MNAUF+2)-1,1)) + + ALLOCATE (H(0:(MNAUF+2)*(MNAUF+3)/2)) + + + ALLOCATE (OM(MAXL, MAXB, MLEVEL)) + + ALLOCATE (ETA(MAXL,MAXB,MLEVEL)) + + ALLOCATE (PS(MAXL, MAXB,1),DPSDT(MAXL, MAXB,1)) + + + ALLOCATE (WSAVE(4*MAUF+15),WSAVE2(4*MAUF+15)) + + ALLOCATE (BREITE(MAXB),AK(MLEVEL+1),BK(MLEVEL+1),pv(2*mlevel+2)) + + ALLOCATE (MPAR(2)) + + ALLOCATE (COSL(MAXL),SINL(MAXL)) + + ALLOCATE (CUA(2,4,MLEVEL),CVA(2,4,MLEVEL)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! GAUSS STUFF ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + IF(MGAUSS .EQ. 1) THEN + LUNIT=0 + FILENAME='fort.18' + + call grib_open_file(LUNIT, TRIM(FILENAME),'r') + + call grib_new_from_file(LUNIT,igrib(1), iret) + +! we can close the file + call grib_close_file(LUNIT) + +! call grib_get(igrib(1),'gridType', j) + + NGJ=MNAUF+1 + + ALLOCATE (GWSAVE(8*NGJ+15,NGJ/2)) + ALLOCATE(GIFAX(10,NGJ)) + ALLOCATE (GBREITE(NGJ),WEIGHT(NGJ)) + ALLOCATE (MLAT(NGJ)) + ALLOCATE (P(0:((MNAUF+3)*(MNAUF+4))/2,NGJ/2)) + ALLOCATE (PP(NGJ/2,0:((MNAUF+3)*(MNAUF+4))/2)) + ALLOCATE (Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB)) + + call grib_get(igrib(1),'numberOfPointsAlongAMeridian', NGJ) + + ! 
get as a integer + call grib_get(igrib(1),'pl', MLAT) + + NGI=SUM(MLAT) + + call grib_get(igrib(1),'numberOfVerticalCoordinateValues',mk) + + IF(mk/2-1 .ne. MLEVEL) THEN + WRITE(*,*) 'FATAL: Number of model levels',mk, & + ' does not agree with', MLEVEL,' in namelist' + STOP + ENDIF + call grib_get(igrib(1),'pv',pv) + AK=pv(1:1+MLEVEL) + BK=pv(2+MLEVEL:2*MLEVEL+2) + + ALLOCATE (LNPMN(0:(MNAUF+1)*(MNAUF+2)-1)) + ALLOCATE (LNPMN2(0:(MNAUF+1)*(MNAUF+2)-1)) + ALLOCATE (UGVG(NGI, 2*MLEVEL),HILFUV(2*MAXL,2)) + + + ALLOCATE (DPSDL(NGI,1),DPSDM(NGI,1)) + + ALLOCATE (PSG(NGI),HILF(NGI)) + ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL)) +! ALLOCATE (UV2(MAXL, MAXB, 2*MLEVEL)) + + ALLOCATE (XMN(0:(MNAUF+1)*(MNAUF+2)-1, 2*MLEVEL)) + ALLOCATE (DG(NGI,MLEVEL),ETAG(NGI,MLEVEL)) + +! Initialisieren Legendretransformation +! auf das LaT/LON Gitter + + PI=ACOS(-1.D0) +!$OMP PARALLEL DO + DO 20 J=1,MAXB + + BREITE(J)=SIN((RLA1-(J-1.D0)*(RLA1-RLA0)/(MAXB-1))* PI/180.D0) + + CALL PLGNFA(MNAUF,BREITE(J),Z(0,J)) + +20 CONTINUE +!$OMP END PARALLEL DO + +! Avoid possible Pole problem +! IF(RLA0 .EQ. -90.0) BREITE(MAXB)=sin(-89.99*PI/180.d0) +! IF(RLA1 .EQ. 90.0) BREITE(1)=sin(89.99*PI/180.d0) + +! Initialisation of fields for FFT and Legendre transformation +! to Gaussian grid and back to phase space + X1=-1.D0 + X2=1.D0 + CALL GAULEG(X1,X2,GBREITE,WEIGHT,NGJ) + +!$OMP PARALLEL DO PRIVATE(M) + DO J=1,NGJ/2 + CALL PLGNFA(MNAUF,GBREITE(J),P(:,J)) + DO M=0,(MNAUF+3)*(MNAUF+4)/2 + PP(J,M)=P(M,J) + ENDDO + ENDDO +!$OMP END PARALLEL DO + + +! MPAR(1)=152 + FILENAME='fort.12' + CALL READSPECTRAL(FILENAME,LNPMN,MNAUF,1,MLEVEL,(/152/),AK,BK) +! goto 111 + CALL SET99(WSAVE,IFAX,mauf) + CALL PHGCUT(LNPMN,PS,WSAVE,IFAX,Z, & + MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) + CALL STATIS(MAXL,MAXB,1,EXP(PS),RMS,MW,SIG) + WRITE(*,'(A12,3F12.4)') 'STATISTICS: ',RMS,MW,SIG + + DO J=1,NGJ/2 + CALL SET99(GWSAVE(1,J),GIFAX(1,J),MLAT(J)) + ENDDO + CALL PHGR213(LNPMN,HILF,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,1) + PSG=HILF + CALL GRPH213(LNPMN2,PSG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, & + MNAUF,NGI,NGJ,1) + CALL PHGR213(LNPMN2,HILF,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,1) + + + HILF=exp(PSG)-exp(HILF) + + CALL STATIS(NGI,1,1,HILF,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + PSG=EXP(PSG) + HILF=PSG + CALL STATIS(NGI,1,1,HILF,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + 111 FILENAME='fort.10' + CALL READSPECTRAL(FILENAME, & + XMN,MNAUF,2*MLEVEL,MLEVEL,(/131,132/),AK,BK) +! Transformieren des Windes auf das Gaussgitter + CALL PHGR213(XMN,UGVG,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,2*MLEVEL) + DO K=1,MLEVEL +! North Pole + CALL JSPPOLE(XMN(:,K),1,MNAUF,.TRUE.,CUA(:,:,K)) + CALL JSPPOLE(XMN(:,MLEVEL+K),1,MNAUF,.TRUE.,CVA(:,:,K)) +! South Pole + CALL JSPPOLE(XMN(:,K),-1,MNAUF,.TRUE.,CUA(:,3:4,K)) + CALL JSPPOLE(XMN(:,MLEVEL+K),-1,MNAUF,.TRUE.,CVA(:,3:4,K)) + ENDDO + + DO K=1,2*MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,UV,WSAVE,IFAX,Z, & + MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,2*MLEVEL) + + + 112 FILENAME='fort.13' + CALL READSPECTRAL(FILENAME,XMN,MNAUF,MLEVEL,MLEVEL,(/155/),AK,BK) +! Transformieren der horizontalen Divergenz auf das Gaussgitter + CALL PHGR213(XMN,DG,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,MLEVEL) + + +! Berechnung des Gradienten des Logarithmus des Bodendrucks +! auf dem Gaussgitter + CALL PHGRAD(LNPMN,DPSDL,DPSDM,GWSAVE,GIFAX,P,H,MLAT,MNAUF,NGI,NGJ,1) + +! 
Berechnung der Vertikalgeschwindigkeit auf dem Gaussgitter + CALL CONTGL(HILF,DPSDL,DPSDM,DG,UGVG(:,1),UGVG(:,MLEVEL+1), & + GBREITE,ETAG,MLAT,AK,BK,NGI,NGJ,MLEVEL) + + + CALL GRPH213(XMN,ETAG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, & + MNAUF,NGI,NGJ,MLEVEL) + DO K=1,MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,ETA,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + CALL GRPH213(XMN,HILF,GWSAVE,GIFAX,PP,WEIGHT,MLAT, MNAUF,NGI,NGJ,1) + + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,1),MNAUF,MSMOOTH) + CALL PHGCUT(XMN,DPSDT,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) +! GOTO 114 + + CALL STATIS(MAXL,MAXB,1,DPSDT,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS DPSDT: ',RMS,MW,SIG + + IF(MOMEGADIFF .ne. 0) THEN +! Berechnung von Omega auf dem Gaussgitter + CALL OMEGA(PSG,DPSDL,DPSDM,DG,UGVG(:,1),UGVG(:,MLEVEL+1), & + GBREITE,ETAG,MLAT,AK,BK,NGI ,NGJ,MLEVEL) + + CALL GRPH213(XMN,ETAG,GWSAVE,GIFAX,PP,WEIGHT,MLAT,& + MNAUF,NGI,NGJ,MLEVEL) + DO K=1,MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,OM,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + ENDIF !MOMEGA + + CALL GRPH213(XMN,PSG,GWSAVE,GIFAX,PP,WEIGHT,MLAT,MNAUF,NGI,NGJ,1) + CALL PHGCUT(XMN,PS,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) + + CALL STATIS(MAXL,MAXB,1,PS,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + 114 DEALLOCATE(HILF,PSG,DPSDL,DPSDM,ETAG,DG,LNPMN) + +! ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL)) +! CALL GRPH213(XMN,UGVG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, +! *MNAUF,NGI,NGJ,2*MLEVEL) +! DO K=1,2*MLEVEL +! IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) +! ENDDO +! CALL PHGCUT(XMN,UV,WSAVE,IFAX,Z, +! *MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,2*MLEVEL) + DEALLOCATE(PP,P,UGVG,MLAT,GBREITE,WEIGHT,GWSAVE,XMN) + +! CALL ETAGAUSS(Z,WSAVE +! *,BREITE,UV,ETA,OM,PS, +! *MAUF,MAXB,MAXL,MANF,MNAUF,MLEVEL,MSMOOTH) + + ELSE + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF PREPARED METEOROLOGICAL FIELDS ! +! ! +! THE FOLLOWING FIELDS ARE EXPECTED: ! +! ! +! UNIT 11: T,U,V (REGULAR GRID) ! +! UNIT 17: Q (REGULAR GRID) ! +! UNIT 13: D (REGULAR GRID) ! +! UNIT 12: LNSP (SPHERICAL HARMONICS) ! +! UNIT 14: SURFACE DATA (REGULAR GRID) ! +! UNIT 16: FLUX DATA (REGULAR GRID) ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! + ALLOCATE (MLAT(MAXB)) + MLAT=MAXL + ALLOCATE (Z(0:((MNAUF+3)*(MNAUF+4))/2,1)) + ALLOCATE (DPSDL(MAXL,MAXB),DPSDM(MAXL,MAXB)) + ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL),DIV(MAXL,MAXB,MLEVEL)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF SURFACE PRESSURE ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + FILENAME='fort.12' + CALL READSPECTRAL(FILENAME,LNPS,MNAUF,1,MLEVEL,(/152/),AK,BK) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF U,V ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! +! OPENING OF UNBLOCKED GRIB FILE +! + FILENAME='fort.10' + CALL READLATLON(FILENAME,UV,MAXL,MAXB,2*MLEVEL,(/131,132/)) + + + PI=ACOS(-1.D0) + DO J=1,MAXB + + BREITE(J)=SIN((RLA1-(J-1.D0)*(RLA1-RLA0)/(MAXB-1))*PI/180.D0) + + ENDDO +! Avoid possible Pole problem +! IF(RLA0 .EQ. -90.0) BREITE(MAXB)=sin(-89.99*PI/180.d0) +! IF(RLA1 .EQ. 90.0) BREITE(1)=sin(89.99*PI/180.d0) + + DO K=1,2*MLEVEL + DO J=1,MAXB + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + IF(RLA0 .EQ. -90.0 .AND. J .EQ. MAXB .OR. & + RLA1 .EQ. 90.0 .AND. J .EQ. 
1) then + UV(:,J,K)=UV(:,J,K)/1.D6 + else + UV(:,J,K)=UV(:,J,K)*COSB + endif + ENDDO + ENDDO + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF LNSP on grid ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! For debugging only +! FILENAME='LNSPG_G.20060330.600' +! INQUIRE(FILE=FILENAME,EXIST=EX) +! CALL READLATLON(FILENAME,QA, +! *MAXL,MAXB,1,1,(/152/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF DIVERGENCE ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(META .EQ. 0 .OR. METADIFF .EQ. 1) THEN + FILENAME='fort.13' + CALL READLATLON(FILENAME,DIV,MAXL,MAXB,MLEVEL,(/155/)) + ENDIF + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! CALCULATION OF ETAPOINT --> TOTAL TIME DERIVATIVE OF ! +! ECMWF VERTICAL COORDINATE ETA MULTIPLIED BY DERIVATIVE ! +! OF PRESSURE IN ETA DIRECTION ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! Initialisieren Legendretransformation +! auf das LaT/LON Gitter +! Without Gaussian grid calculation Legendre Polynomials are calculated +! only for one latitude to save space + + DO J=1,MAXB + + CALL PLGNFA(MNAUF,BREITE(J),Z(0,1)) + + CALL PHGCUT(LNPS,PS(:,J,1),WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,1,1) + + + IF(META .EQ. 0 .or. METADIFF .EQ. 1 ) THEN + CALL PHGRACUT(LNPS,DPSDL(:,J),DPSDM(:,J),WSAVE,IFAX,Z,H,MAUF, & + MNAUF,MAXL,1,MANF,1) + ENDIF + ENDDO + + PS=EXP(PS) + +! For debugging only + CALL STATIS(MAXL,MAXB,1,PS(:,:,1),RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + + IF(MOMEGADIFF .ne. 0) THEN + + CALL OMEGA(PS,DPSDL,DPSDM,DIV,UV(:,:,1),UV(:,:,MLEVEL+1), & + BREITE,OM,MLAT,AK,BK,MAXL*MAXB,MAXB,MLEVEL) + ENDIF + + IF(META .EQ. 0 .OR. METADIFF .ne. 0) THEN + DPSDT=PS + CALL CONTGL(DPSDT,DPSDL,DPSDM,DIV,UV(:,:,1),UV(:,:,MLEVEL+1), & + BREITE,ETA,MLAT,AK,BK,MAXL*MAXB,MAXB,MLEVEL) + ENDIF + + ENDIF ! MGAUSS + +! CREATE FILE VERTICAL.EC NEEDED BY POP MODEL + + open(21,file='VERTICAL.EC') + write(21,'(a)') + write(21,'(a)') 'VERTICAL DISCRETIZATION OF POP MODEL' + write(21,'(a)') + write(21,'(i3,a)') MLEVEL,' number of layers' + write(21,'(a)') + write(21,'(a)') '* A(NLEV+1)' + write(21,'(a)') + do 205 i=1,MLEVEL+1 +205 write(21,'(f18.12)') AK(I) + write(21,'(a)') + write(21,'(a)') '* B(NLEV+1)' + write(21,'(a)') + do 210 i=1,MLEVEL+1 +210 write(21,'(f18.12)') BK(I) + close(21) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF OMEGA ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(MOMEGA .NE. 0 ) THEN + + + + ALLOCATE (OMR(MAXL, MAXB, MLEVEL)) + + FILENAME='fort.19' + CALL READLATLON(FILENAME,OMR,MAXL,MAXB,MLEVEL,(/135/)) + + IF(MOMEGADIFF .NE. 0 ) THEN + + DO K=1,MLEVEL + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,OMR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' OMEGA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,OM(:,:,K)-OMR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') 'OMEGA DIFF: ',K,RMS,MW,SIG + ENDDO + + ENDIF + ENDIF + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF ETA ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(META .NE. 0 ) THEN + + ALLOCATE (ETAR(MAXL, MAXB, MLEVEL)) + + P00=101325. + FILENAME='fort.21' + CALL READLATLON(FILENAME,ETAR,MAXL,MAXB,MLEVEL,(/77/)) + + if(MDPDETA .EQ. 
1) THEN + DO K=1,MLEVEL + DAK=AK(K+1)-AK(K) + DBK=BK(K+1)-BK(K) + DO J=1,MAXB + DO I=1,MAXL + ETAR(I,J,K)=2*ETAR(I,J,K)*PS(I,J,1)*(DAK/PS(I,J,1)+DBK)/ & + (DAK/P00+DBK) + IF(K .GT. 1) ETAR(I,J,K)=ETAR(I,J,K)-ETAR(I,J,K-1) + ENDDO + ENDDO + ENDDO + ENDIF + + IF(METADIFF .NE. 0 ) THEN + + DO K=1,MLEVEL + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,ETAR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETAR: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K)-ETAR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') 'ETA DIFF: ',K,RMS,MW,SIG + ENDDO + DO K=1,MLEVEL + WRITE(*,'(I3,2F11.4)') K,ETA(1,MAXB/2,K),ETAR(1,MAXB/2,K) + ENDDO + ELSE + ETA=ETAR + ENDIF + ENDIF + + ALLOCATE (T(MAXL, MAXB, MLEVEL)) + ALLOCATE (QA(MAXL, MAXB, MLEVEL)) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF T ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! +! OPENING OF UNBLOCKED GRIB FILE +! + FILENAME='fort.11' + CALL READLATLON(FILENAME,T,MAXL,MAXB,MLEVEL,(/130/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF SPECIFIC HUMIDITY ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + FILENAME='fort.17' + CALL READLATLON(FILENAME,QA,MAXL,MAXB,MLEVEL,(/133/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! TEST READING OF UV from MARS (debug only) ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! FILENAME='fort.22' +! CALL READLATLON(FILENAME,UV2,MAXL,MAXB,2*MLEVEL,2,(/131,132/)) + + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! WRITE MODEL LEVEL DATA TO fort.15 ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! Calculation of etadot in CONTGL needed scaled winds (ucosphi,vcosphi) +! Now we are transforming back to the usual winds. + DO K=1,MLEVEL + DO J=2,MAXB-1 + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + UV(:,J,K)=UV(:,J,K)/COSB + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)/COSB + ENDDO +! special treatment for poles, if necessary. + DO J=1,MAXB,MAXB-1 + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + if(1.0-BREITE(J)*BREITE(J) .gt. 0 .OR. MGAUSS .NE. 1) then + IF(RLA0 .EQ. -90.0 .AND. J .EQ. MAXB .OR. & + RLA1 .EQ. 90.0 .AND. J .EQ. 1) then + UV(:,J,K)=UV(:,J,K)*1.D6 + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)*1.D6 + else + UV(:,J,K)=UV(:,J,K)/COSB + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)/COSB + endif + else + HILFUV(5:MAXL,:)=0. + HILFUV(1:2,:)=0. + IF(J.EQ.MAXB) THEN +! Suedpol + HILFUV(3:4,1)=CUA(:,4,K) + HILFUV(3:4,2)=CVA(:,4,K) + ELSE +! Nordpol + HILFUV(3:4,1)=CUA(:,2,K) + HILFUV(3:4,2)=CVA(:,2,K) + ENDIF + CALL RFOURTR(HILFUV(:,1),WSAVE,IFAX,MAXL/2-1,MAXL,-1) + DO I=0,MAXL-1 + IF(MANF+I.LE.MAXL) THEN + UV(I+1,J,K)=HILFUV(MANF+I,1) + ELSE + UV(I+1,J,K)=HILFUV(MANF-MAXL+I,1) + ENDIF + ENDDO + CALL RFOURTR(HILFUV(:,2),WSAVE,IFAX,MAXL/2-1,MAXL,-1) + DO I=0,MAXL-1 + IF(MANF+I.LE.MAXL) THEN + UV(I+1,J,MLEVEL+K)=HILFUV(MANF+I,2) + ELSE + UV(I+1,J,MLEVEL+K)=HILFUV(MANF-MAXL+I,2) + ENDIF + ENDDO + endif + ENDDO + ENDDO + +! open output file + call grib_open_file(LUNIT,'fort.15','w') + +! 
we use temperature on lat/lon on model levels as template for model level data + LUNIT2=0 + call grib_open_file(LUNIT2,'fort.11','r') + call grib_new_from_file(LUNIT2,igrib(1), iret) + call grib_close_file(LUNIT2) + + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,UV(:,:,1),MAXL,MAXB,MLEVEL,MLEVELIST,1,(/131/)) + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,UV(:,:,MLEVEL+1),MAXL,MAXB,MLEVEL,MLEVELIST,1,(/132/)) + + IF(MDPDETA .ne. 1 .AND. MGAUSS .EQ. 0 .and. META .eq. 1) THEN + CALL WRITELATLON(LUNIT,igrib(1),ogrib,ETA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/77/)) + ELSE + CALL WRITELATLON(LUNIT,igrib(1),ogrib,ETA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/METAPAR/)) + ENDIF + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,T,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/130/)) + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,PS,MAXL,MAXB,1,'1',1,(/134/)) + + call grib_set(igrib(1),"levelType","ml") + call grib_set(igrib(1),"typeOfLevel","hybrid") + CALL WRITELATLON(LUNIT,igrib(1),ogrib,QA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/133/)) + + + IF(MOMEGA .EQ. 1) THEN + call grib_open_file(LUNIT2,'fort.25','w') + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OMR,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/135/)) + + IF(MOMEGADIFF .EQ. 1) THEN + + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,DPSDT,MAXL,MAXB,1,'1',1,(/158/)) + + OM=OM-OMR + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OM,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/001/)) + call grib_close_file(LUNIT2) + ENDIF + ENDIF + + IF(META .EQ. 1 .and. METADIFF .EQ. 1) THEN + call grib_open_file(LUNIT2,'fort.26','w') + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,ETAR,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/135/)) + +! IF(MOMEGADIFF .EQ. 1) THEN + + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,DPSDT,MAXL,MAXB,1,'1',1,(/158/)) + + OM=ETA-ETAR + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OM,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/001/)) + call grib_close_file(LUNIT2) +! ENDIF + ENDIF + + + call grib_close_file(LUNIT) + + + + 2000 STOP 'SUCCESSFULLY FINISHED CONVERT_PRE: CONGRATULATIONS' + 3000 STOP 'ROUTINE CONVERT_PRE: ERROR' + 9999 stop 'ROUTINE CONVERT_PRE: ERROR' + END + + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + INTEGER FUNCTION IA (FIELD1,NI,NJ,NK,G) + + IMPLICIT NONE + INTEGER NI,NJ,NK,I,J,K + REAL FIELD1(NI,NJ,NK) + REAL G + REAL RMIN,RMAX,XMAX,A,A1,A2 + + RMAX=FIELD1(1,1,1) + RMIN=FIELD1(1,1,1) + + DO 100 K=1,NK + DO 100 J=1,NJ + DO 100 I=1,NI + IF (FIELD1(I,J,K).GT.RMAX)RMAX=FIELD1(I,J,K) + IF (FIELD1(I,J,K).LT.RMIN)RMIN=FIELD1(I,J,K) +100 CONTINUE + + IF (ABS(RMIN).GT.RMAX.OR.ABS(RMIN).EQ.RMAX) THEN + XMAX=ABS(RMIN) + ELSE + XMAX=RMAX + ENDIF + + IF (XMAX.EQ.0) THEN + IA = 0 + RETURN + ENDIF + + A1=LOG10 ((G/10.d0)/XMAX) + A2=LOG10 ( G/XMAX ) + IF(A1 .gt. A2) THEN + A=A2 + ELSE + A=A1 + ENDIF + + IF (A.GT.0) IA=INT(A) + IF (A.LT.0) IA=INT(A-1.0) + + RETURN + END + + SUBROUTINE STATIS (NI,NJ,NK,PHI,RMS,MW,SIG) + IMPLICIT REAL (A-H,O-Z) + + REAL PHI(NI,NJ,NK),SIG,MW,RMS,P + + N=NI*NJ*NK + + RMS=0. + MW=0. + + DO 10 I=1,NI + DO 10 J=1,NJ + DO 10 K=1,NK + P=PHI(I,J,K) + RMS=RMS+P*P + MW=MW+P +10 CONTINUE + + RMS=SQRT(RMS/N) + MW=MW/N + + IF(RMS*RMS-MW*MW.LT.0.) 
THEN + SIG=0.0 + ELSE + SIG=SQRT(RMS*RMS-MW*MW) + ENDIF + + RETURN + END + diff --git a/python/pythontest/TestInstallTar/flex_extract_v7.1/src/rwGRIB2.f90 b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/rwGRIB2.f90 new file mode 100644 index 0000000000000000000000000000000000000000..09ec94e305507d30e37c43009d306ffd343cbe05 --- /dev/null +++ b/python/pythontest/TestInstallTar/flex_extract_v7.1/src/rwGRIB2.f90 @@ -0,0 +1,263 @@ + MODULE RWGRIB2 + + CONTAINS + + SUBROUTINE READLATLON(FILENAME,FELD,MAXL,MAXB,MLEVEL,MPAR) + + USE GRIB_API + + IMPLICIT NONE + + integer :: ifile + integer :: iret + integer :: n,mk,parid,nm + integer :: i,k + integer,dimension(:),allocatable :: igrib + integer :: numberOfPointsAlongAParallel + integer :: numberOfPointsAlongAMeridian + real, dimension(:), allocatable :: values + integer :: numberOfValues + real,dimension(maxl,maxb,mlevel) :: feld + integer:: maxl,maxb,mlevel,mstride,mpar(:),irest,div,level + integer :: l(size(mpar)) + character*(*):: filename + + call grib_open_file(ifile, TRIM(FILENAME),'r') + + ! count the messages in the file + call grib_count_in_file(ifile,n) + allocate(igrib(n)) + igrib=-1 + + ! Load the messages from the file. + DO i=1,n + call grib_new_from_file(ifile,igrib(i), iret) + END DO + + ! we can close the file + call grib_close_file(ifile) + + nm=size(mpar) + div=mlevel/nm + l=0 + + ! Loop on all the messages in memory + iloop: DO i=1,n +! write(*,*) 'processing message number ',i + ! get as a integer + call grib_get(igrib(i),'numberOfPointsAlongAParallel', & + numberOfPointsAlongAParallel) + + ! get as a integer + call grib_get(igrib(i),'numberOfPointsAlongAMeridian', & + numberOfPointsAlongAMeridian) + + call grib_get(igrib(i),'numberOfVerticalCoordinateValues',mk) + + call grib_get_size(igrib(i),'values',numberOfValues) +! write(*,*) 'numberOfValues=',numberOfValues + + allocate(values(numberOfValues), stat=iret) + ! get data values + call grib_get(igrib(i),'values',values) + + call grib_get(igrib(i),'paramId',parid) + call grib_get(igrib(i),'level',level) + + kloop: do k=1,nm + if(parid .eq. mpar(k)) then +! l(k)=l(k)+1 + feld(:,:,(k-1)*div+level)=reshape(values,(/maxl,maxb/)) +! print*,(k-1)*div+l(k),parid + exit kloop + endif + enddo kloop + if(k .gt. nm .and. parid .ne. mpar(nm)) then + write(*,*) k,nm,parid,mpar(nm) + write(*,*) 'ERROR readlatlon: parameter ',parid,'is not',mpar + stop + endif + +! print*,i + END DO iloop + write(*,*) 'readlatlon: ',i-1,' records read' + + DO i=1,n + call grib_release(igrib(i)) + END DO + + deallocate(values) + deallocate(igrib) + + END SUBROUTINE READLATLON + + SUBROUTINE WRITELATLON(iunit,igrib,ogrib,FELD,MAXL,MAXB,MLEVEL,& + MLEVELIST,MSTRIDE,MPAR) + + USE GRIB_API + + IMPLICIT NONE + + INTEGER IFIELD,MLEVEL,MNAUF,I,J,K,L,MSTRIDE,IERR,JOUT + INTEGER MPAR(MSTRIDE),MAXL,MAXB,LEVMIN,LEVMAX + INTEGER IUNIT,igrib,ogrib + REAL ZSEC4(MAXL*MAXB) + REAL FELD(MAXL,MAXB,MLEVEL) + CHARACTER*(*) MLEVELIST + INTEGER ILEVEL(MLEVEL),MLINDEX(MLEVEL+1),LLEN + + ! parse MLEVELIST + + LLEN=len(trim(MLEVELIST)) + if(index(MLEVELIST,'to') .ne. 0 .or. index(MLEVELIST,'TO') .ne. 0) THEN + i=index(MLEVELIST,'/') + read(MLEVELIST(1:i-1),*) LEVMIN + i=index(MLEVELIST,'/',.true.) + read(MLEVELIST(i+1:LLEN),*) LEVMAX + l=0 + do i=LEVMIN,LEVMAX + l=l+1 + ILEVEL(l)=i + enddo + else + l=1 + MLINDEX(1)=0 + do i=1,LLEN + if(MLEVELIST(i:i) .eq. 
'/') THEN + l=l+1 + MLINDEX(l)=i + endif + enddo + MLINDEX(l+1)=LLEN+1 + do i=1,l + read(MLEVELIST(MLINDEX(i)+1:MLINDEX(i+1)-1),*) ILEVEL(i) + enddo + endif + + DO k=1,l + call grib_set(igrib,"level",ILEVEL(k)) + DO j=1,MSTRIDE + call grib_set(igrib,"paramId",MPAR(j)) +! if(MPAR(j) .eq. 87) then +! call grib_set(igrib,"shortName","etadot") +! call grib_set(igrib,"units","Pa,s**-1") +! endif +! if(MPAR(j) .eq. 77) then +! call grib_set(igrib,"shortName","etadot") +! call grib_set(igrib,"units","s**-1") +! endif + if(l .ne. mlevel) then + zsec4(1:maxl*maxb)=RESHAPE(FELD(:,:,ILEVEL(k)),(/maxl*maxb/)) + else + zsec4(1:maxl*maxb)=RESHAPE(FELD(:,:,k),(/maxl*maxb/)) + endif + call grib_set(igrib,"values",zsec4) + + call grib_write(igrib,iunit) + + ENDDO + ENDDO + + + + END SUBROUTINE WRITELATLON + + SUBROUTINE READSPECTRAL(FILENAME,CXMN,MNAUF,MLEVEL,& + MAXLEV,MPAR,A,B) + + USE GRIB_API + + IMPLICIT NONE + + + integer :: ifile + integer :: iret + integer :: n,mk,div,nm,k + integer :: i,j,parid + integer,dimension(:),allocatable :: igrib + real, dimension(:), allocatable :: values + integer :: numberOfValues,maxlev + REAL :: A(MAXLEV+1),B(MAXLEV+1),pv(2*MAXLEV+2) + REAL:: CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) +integer:: maxl,maxb,mlevel,mstride,mpar(:),mnauf,ioffset,ipar,ilev,l(size(mpar)) +character*(*):: filename + + call grib_open_file(ifile, TRIM(FILENAME),'r') + + ! count the messages in the file + call grib_count_in_file(ifile,n) + allocate(igrib(n)) + igrib=-1 + + ! Load the messages from the file. + DO i=1,n + call grib_new_from_file(ifile,igrib(i), iret) + END DO + + ! we can close the file + call grib_close_file(ifile) + + l=0 + ! Loop on all the messages in memory + iloop: DO i=1,n + ! write(*,*) 'processing message number ',i + ! get as a integer + call grib_get(igrib(i),'pentagonalResolutionParameterJ', j) + + call grib_get_size(igrib(i),'values',numberOfValues) + ! write(*,*) 'numberOfValues=',numberOfValues + + call grib_get(igrib(i),'numberOfVerticalCoordinateValues',mk) + + call grib_get(igrib(i),'level',ilev) + + + + call grib_get(igrib(i),'pv',pv) + + allocate(values(numberOfValues), stat=iret) + ! get data values + call grib_get(igrib(i),'values',values) + +! IOFFSET=mod(i-1,MSTRIDE)*(mk/2-1) +! CXMN(:,IOFFSET+ilev)=values(1:(MNAUF+1)*(MNAUF+2)) + + call grib_get(igrib(i),'paramId',parid) + nm=size(mpar) + div=mlevel/nm + kloop: do k=1,nm + if(parid .eq. mpar(k)) then + l(k)=l(k)+1 + cxmn(:,(k-1)*div+l(k))=values(1:(MNAUF+1)*(MNAUF+2)) +! print*,(k-1)*div+l(k),parid + exit kloop + endif + + enddo kloop + if(k .gt. nm .and. parid .ne. mpar(nm)) then + write(*,*) k,nm,parid,mpar(nm) + write(*,*) 'ERROR readspectral: parameter ',parid,'is not',mpar + stop + endif + +! 
print*,i + + END DO iloop + + write(*,*) 'readspectral: ',i-1,' records read' + + DO i=1,n + call grib_release(igrib(i)) + END DO + + deallocate(values) + deallocate(igrib) + + + + A=pv(1:1+MAXLEV) + B=pv(2+MAXLEV:2*MAXLEV+2) + + END SUBROUTINE READSPECTRAL + + END MODULE RWGRIB2 diff --git a/python/pythontest/TestInstallTar/test_untar/_templates/compilejob.temp b/python/pythontest/TestInstallTar/test_untar/_templates/compilejob.temp new file mode 100644 index 0000000000000000000000000000000000000000..715308b3d361e2b0f5c088b344ecde539d74243e --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/_templates/compilejob.temp @@ -0,0 +1,77 @@ +#!/bin/ksh + +# ON ECGB: +# start with ecaccess-job-submit -queueName ecgb NAME_OF_THIS_FILE on gateway server +# start with sbatch NAME_OF_THIS_FILE directly on machine + +#SBATCH --workdir=/scratch/ms/spatlh00/lh0 +#SBATCH --qos=normal +#SBATCH --job-name=flex_ecmwf +#SBATCH --output=flex_ecmwf.%j.out +#SBATCH --error=flex_ecmwf.%j.out +#SBATCH --mail-type=FAIL +#SBATCH --time=12:00:00 + +## CRAY specific batch requests +##PBS -N flex_ecmwf +##PBS -q ns +##PBS -S /usr/bin/ksh +# -o /scratch/ms/no/sbc/flex_ecmwf.$Jobname.$Job_ID.out +# job output is in .ecaccess_DO_NOT_REMOVE +##PBS -j oe +##PBS -V +##PBS -l EC_threads_per_task=1 +##PBS -l EC_memory_per_task=3200MB + +set -x +export VERSION=7.1 +case $HOST in + *ecg*) + module load python + module unload grib_api + module unload emos + module load grib_api/1.14.5 + module load emos/437-r64 + export FLEXPART_ROOT_SCRIPTS= + export MAKEFILE=Makefile.gfortran + ;; + *cca*) + module switch PrgEnv-cray PrgEnv-intel + module load grib_api + module load emos + module load python + echo ${GROUP} + echo ${HOME} + echo $HOME | awk -F / '{print $1, $2, $3, $4}' + export GROUP=`echo $HOME | awk -F / '{print $4}'` + export SCRATCH=/scratch/ms/${GROUP}/${USER} + export FLEXPART_ROOT_SCRIPTS= + export MAKEFILE=Makefile.CRAY + ;; +esac + +mkdir -p $FLEXPART_ROOT_SCRIPTS/flex_extract_v$VERSION +cd $FLEXPART_ROOT_SCRIPTS/flex_extract_v$VERSION # if FLEXPART_ROOT is not set this means cd to the home directory +tar -xvf $HOME/flex_extract_v$VERSION.tar +cd src +\rm *.o *.mod CONVERT2 +make -f $MAKEFILE >flexcompile 2>flexcompile + +ls -l CONVERT2 >>flexcompile +if [ $? -eq 0 ]; then + echo 'SUCCESS!' >>flexcompile + mail -s flexcompile.$HOST.$$ $USER <flexcompile +else + echo Environment: >>flexcompile + env >> flexcompile + mail -s "ERROR! flexcompile.$HOST.$$" $USER <flexcompile +fi + + + + + + + + + diff --git a/python/pythontest/TestInstallTar/test_untar/_templates/ecmwf_grib1_table_128 b/python/pythontest/TestInstallTar/test_untar/_templates/ecmwf_grib1_table_128 new file mode 100644 index 0000000000000000000000000000000000000000..b14d7afb69fd6f687840d2c406698f56f3d696d8 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/_templates/ecmwf_grib1_table_128 @@ -0,0 +1,197 @@ +! +! ECMWFGRIB128.TBL -- GRIB 2 parameter conversion table version 128 +! +!ID# NAME UNITS GNAM SCALE MISSING +! +001 Stream function m**2 s**-1 STRF 0 -9999.00 +002 Velocity potential m**2 s**-1 VPOT 0 -9999.00 +003 Potential temperature K THTA 0 -9999.00 +004 Equivalent potential temperature K THTE 0 -9999.00 +005 Saturated eq. pot. 
temperature K STHE 0 -9999.00 +!006-010 Reserved for Metview +011 U component of divergent wind m s**-1 UDVW 0 -9999.00 +012 V component of divergent wind m s**-1 VDVW 0 -9999.00 +013 U component of rotational wind m s**-1 URTW 0 -9999.00 +014 V component of rotational wind m s**-1 VRTW 0 -9999.00 +!015-020 Reserved for Metview +021 Unbalanced component of temp. K UCTP 0 -9999.00 +022 Unbal. comp. of log surface pres ln(Pa) UCLN 0 -9999.00 +023 Unbal. comp. of divergence s**-1 UCDV 0 -9999.00 +024 Reserved for future unbal. comp. - X 0 -9999.00 +023 Reserved for future unbal. comp. - X 0 -9999.00 +026 Lake cover (0-1) fraction CL 0 -9999.00 +027 Low vegetation cover (0-1) fraction CVL 0 -9999.00 +028 High vegetation cover (0-1) fraction CVH 0 -9999.00 +029 Type of low vegetation type TVL 0 -9999.00 +030 Type of high vegetation type TVH 0 -9999.00 +031 Sea-ice cover (0-1) fraction CI 0 -9999.00 +032 Snow albedo (0-1) fraction ASN 0 -9999.00 +033 Snow density kg m**-3 RSN 0 -9999.00 +034 Sea surface temperature K SST 0 -9999.00 +035 Ice surface temperature layer 1 K ISTL1 0 -9999.00 +036 Ice surface temperature layer 2 K ISTL2 0 -9999.00 +037 Ice surface temperature layer 3 K ISTL3 0 -9999.00 +038 Ice surface temperature layer 4 K ISTL4 0 -9999.00 +039 Volumetric soil water layer 1 m**3 m**3 SWVL1 0 -9999.00 +040 Volumetric soil water layer 2 m**3 m**3 SWVL2 0 -9999.00 +041 Volumetric soil water layer 3 m**3 m**3 SWVL3 0 -9999.00 +042 Volumetric soil water layer 4 m**3 m**3 SWVL4 0 -9999.00 +043 Soil type type SLT 0 -9999.00 +044 Snow evaporation m (H2O) ES 0 -9999.00 +045 Snowmelt m (H2) SMLT 0 -9999.00 +046 Solar duration s SDUR 0 -9999.00 +047 Direct solar radiation W m**-2 DSRP 0 -9999.00 +048 Magnitude of surface stress N m**-2 s MAGSS 0 -9999.00 +049 10 metre wind gust m s**-1 10FG 0 -9999.00 +050 Large-scale precip. fraction s SLPF 0 -9999.00 +051 Maximum 2 metre temperature K MX2T24 0 -9999.00 +052 Minimum 2 metre temperature K MN2T24 0 -9999.00 +053 Montgomery potential m**2 s**-2 MONT 0 -9999.00 +054 Pressure Pa PRES 0 -9999.00 +055 Mean 2m temp.in past 24 hours K MN2T24 0 -9999.00 +056 Mean 2m dewpt. temp. in past 24h K MN2D24 0 -9999.00 +057 Downward UV radiation at sfc. W m**-2 s UVB 0 -9999.00 +058 Photo. active rad. at sfc. W m**-2 s PAR 0 -9999.00 +059 Convective available pot. energy J kg**-1 CAPE 0 -9999.00 +060 Potential vorticity K m**2 kg**-1 s**-1 PVOR 0 -9999.00 +061 Total precipitation from obs. mm*100+Nobs TPO 0 -9999.00 +062 Observation count count OBCT 0 -9999.00 +063 Start time for skin temp. diff. s TSDIFS 0 -9999.00 +064 Finish time for skin temp. diff. s TSDIFE 0 -9999.00 +065 Skin temperature difference K TSDIF 0 -9999.00 +!66 to 77 Unused +078 Total column liquid water kg m**-2 TCLW 0 -9999.00 +079 Total column ice water kg m**-2 TCIW 0 -9999.00 +!80 to 120 Experimental products (contents may vary) +!121 to 124 Unused +125 Vert. integrated tot. energy J m**-2 COLENR 0 -9999.00 +126 Param. for sensitive area pred. 
- SENPRM 0 -9999.00 +127 Atmospheric tide - AT 0 -9999.00 +128 Budget values - BV 0 -9999.00 +129 Geopotential m**2 s**-2 Z 0 -9999.00 +130 Temperature K T 0 -9999.00 +131 U velocity m s**-1 U 0 -9999.00 +132 V velocity m s**-1 V 0 -9999.00 +133 Specific humidity kg (H2O) kg**1 Q 0 -9999.00 +134 Surface pressure Pa SP -2 -9999.00 +135 Vertical velocity Pa s**-1 W -2 -9999.00 +136 Total column water kg m**-2 TCW 0 -9999.00 +137 Total column water vapour kg m**-2 TCWV 0 -9999.00 +138 Vorticity (relative) s**-1 VO 0 -9999.00 +139 Soil temperature level 1 K STL1 0 -9999.00 +140 Soil wetness level 1 m (H2O) SWL1 0 -9999.00 +141 Snow depth m (H2O) SD 0 -9999.00 +142 Large scale precipitation m LSP 0 -9999.00 +143 Convective precipitation m CP 0 -9999.00 +144 Snowfall (conv. + strat.) m (H2O) SF 0 -9999.00 +145 Boundary layer dissipation W m**-2 s BLD 0 -9999.00 +146 Surface sensible heat flux W m**-2 s SSHF 0 -9999.00 +147 Surface latent heat flux W m**-2 s SLHF 0 -9999.00 +148 Charnock - CHNK 0 -9999.00 +149 Surface net radiation W m**-2 s SNR 0 -9999.00 +150 Top net radiation W m**-2 s TNR 0 -9999.00 +151 Mean sea level pressure Pa MSLP 0 -9999.00 +152 Logarithm of surface pressure ln(Pa) LNSP 0 -9999.00 +153 Short-wave heating rate K SWHR 0 -9999.00 +154 Long-wave heating rate K LWHR 0 -9999.00 +155 Divergence s**-1 D 0 -9999.00 +156 Height m HGHT 0 -9999.00 +157 Relative humidity % RELH 0 -9999.00 +158 Tendency of surface pressure Pa s**-1 PTND -2 -9999.00 +159 Boundary layer height m ZPBL 0 -9999.00 +160 Standard deviation of orography m SDOR 0 -9999.00 +161 Anisotropy of sub-gridscale oro. - ISOR 0 -9999.00 +162 Angle of sub-gridscale orography rad ANOR 0 -9999.00 +163 Slope of sub-gridscale orography - SLOR 0 -9999.00 +164 Total cloud cover (0-1) fraction TCC 0 -9999.00 +165 10 metre U wind component m s**-1 10U 0 -9999.00 +166 10 metre V wind component m s**-1 10V 0 -9999.00 +167 2 metre temperature K 2T 0 -9999.00 +168 2 metre dewpoint temperature K 2D 0 -9999.00 +169 Surface solar radiation downwards W m**-2 s SSRD 0 -9999.00 +170 Soil temperature level 2 K STL2 0 -9999.00 +171 Soil wetness level 2 m (H2O) SWL2 0 -9999.00 +172 Land-sea mask (0,1) logical LSM 0 -9999.00 +173 Surface roughness m SR 0 -9999.00 +174 Albedo (0-1) fraction ALBD 0 -9999.00 +175 Surface thermal radiation down W m**-2 s STRD 0 -9999.00 +176 Surface solar radiation W m**-2 s SSR 0 -9999.00 +177 Surface thermal radiation W m**-2 s STR 0 -9999.00 +178 Top solar radiation W m**-2 s TSR 0 -9999.00 +179 Top thermal radiation W m**-2 s TTR 0 -9999.00 +180 East-West surface stress N m**-2 s EWSS 0 -9999.00 +181 North-South surface stress N m**-2 s NSSS 0 -9999.00 +182 Evaporation m (H2O) EVAP 0 -9999.00 +183 Soil temperature level 3 K STL3 0 -9999.00 +184 Soil wetness level 3 m (H2O) SWL3 0 -9999.00 +185 Convective cloud cover (0-1) fraction CCC 0 -9999.00 +186 Low cloud cover (0-1) fraction LCC 0 -9999.00 +187 Medium cloud cover (0-1) fraction MCC 0 -9999.00 +188 High cloud cover (0-1) fraction HCC 0 -9999.00 +189 Sunshine duration s SUND 0 -9999.00 +190 E-W comp. of subgrid oro. var. m**2 EWOV 0 -9999.00 +191 N-S comp. of subgrid oro. var. m**2 NSOV 0 -9999.00 +192 NW-SE comp. of subgrid oro. var. m**2 NWOV 0 -9999.00 +193 NE-SW comp. of subgrid oro. var. m**2 NEOV 0 -9999.00 +194 Brightness temperature K BTMP 0 -9999.00 +195 Lat. comp. of gravity wave stress N m**-2 s LGWS 0 -9999.00 +196 Mer. comp. 
of gravity wave stress N m**-2 s MGWS 0 -9999.00 +197 Gravity wave dissipation W m**-2 s GWD 0 -9999.00 +198 Skin reservoir content m (H2O) SRC 0 -9999.00 +199 Vegetation fraction (0-1) fraction VEG 0 -9999.00 +200 Variance of subgrid orography m**2 VSO 0 -9999.00 +201 Max. 2m temp. since post-process. K MX2T 0 -9999.00 +202 Min. 2m temp. since post-process. K MN2T 0 -9999.00 +203 Ozone mass mixing ratio kg (O3) kg**-1 OZMR 0 -9999.00 +204 Precipiation analysis weights - PAW 0 -9999.00 +205 Runoff m RO 0 -9999.00 +206 Total column ozone kg m**-2 TOZO 0 -9999.00 +207 10 metre wind speed m s**-1 10SI 0 -9999.00 +208 Top net solar rad., clear sky W m**-2 s TSRC 0 -9999.00 +209 Top net thermal rad., clear sky W m**-2 s TTRC 0 -9999.00 +210 Surface net solar rad., clear sky W m**-2 s SSRC 0 -9999.00 +211 Sfc. net thermal rad., clear sky W m**-2 s STRC 0 -9999.00 +212 Solar insolation W m**-2 s SI 0 -9999.00 +213 Unused +214 Diabatic heating by radiation K DHR 0 -9999.00 +215 Diab. heating by vert. diffusion K DHVD 0 -9999.00 +216 Diab. heating by cumulus convec. K DHCC 0 -9999.00 +217 Diab. heating resolved conden. K DHLC 0 -9999.00 +218 Vertical diffusion of zonal wind m s**-1 VDZW 0 -9999.00 +219 Vertical diffusion of mer.. wind m s**-1 VDMW 0 -9999.00 +220 E-W gravity wave drag tendency m s**-1 EWGD 0 -9999.00 +221 N-S gravity wave drag tendency m s**-1 NSGD 0 -9999.00 +222 Convective tendency of zonal wind m s**-1 CTZW 0 -9999.00 +223 Convective tendency of mer. wind m s**-1 CTMW 0 -9999.00 +224 Vertical diffusion of humidity kg (H2O) kg**-1 VDH 0 -9999.00 +225 Humid. tend. by cumulus convec. kg (H2O) kg**-1 HTCC 0 -9999.00 +226 Humid. tend. by resolved conden. kg (H2O) kg**-1 HTLC 0 -9999.00 +227 Change from removing neg. humid. kg (H2O) kg**-1 CRNH 0 -9999.00 +228 Total precipitation m P--M 0 -9999.00 +229 Instantaneous X surface stress N m**-2 IEWS 0 -9999.00 +230 Instantaneous Y surface stress N m**-2 INSS 0 -9999.00 +231 Instantaneous surface heat flux W m**-2 ISHF 0 -9999.00 +232 Instantaneous moisture flux kg m**-2 s IE 0 -9999.00 +233 Apparent surface humidity kg (H2O) kg**-1 ASQ 0 -9999.00 +234 Log of sfc. rough. length (heat) ln(m) LSRH 0 -9999.00 +235 Skin temperature K SKT 0 -9999.00 +236 Soil temperature level 4 K STL4 0 -9999.00 +237 Soil wetness level 4 m (H2O) SWL4 0 -9999.00 +238 Temperature of snow layer K TSN 0 -9999.00 +239 Convective snowfall m (H2O) CSF 0 -9999.00 +240 Large-scale snowfall m (H2O) LSF 0 -9999.00 +241 Accum. cloud frac. tend. (-1 - 1) fraction ACF 0 -9999.00 +242 Accum liquid water tend. (-1 - 1) fraction ALW 0 -9999.00 +243 Forecast albedo (0-1) fraction FAL 0 -9999.00 +244 Forecast surface roughness m FSR 0 -9999.00 +245 Fcst. log of src. rough. (heat) log(m) FLSR 0 -9999.00 +246 Cloud liquid water content kg (H2O) kg**-1 CLWC 0 -9999.00 +247 Cloud ice water content kg kg**-1 CIWC 0 -9999.00 +248 Cloud cover (0-1) fraction CC 0 -9999.00 +249 Accum. ice water tend. (-1 - 1) fraction AIW 0 -9999.00 +250 Ice age (1,0) 0-first 1-multi logical ICE 0 -9999.00 +251 Adiabatic tendency of temperature K ATTE 0 -9999.00 +252 Adiabatic tendency of humidity kg (H2O) kg**-1 ATHE 0 -9999.00 +253 Adiabatic tendency of zonal wind m s**-1 ATZE 0 -9999.00 +254 Adiabatic tendency of mer. 
wind m s**-1 ATMW 0 -9999.00 +255 Indicates a missing value - MISS 0 -9999.00 \ No newline at end of file diff --git a/python/pythontest/TestInstallTar/test_untar/python/ControlFile.py b/python/pythontest/TestInstallTar/test_untar/python/ControlFile.py new file mode 100644 index 0000000000000000000000000000000000000000..59a4752c3a28c5b087528b3ce9bfc49787b215c9 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/ControlFile.py @@ -0,0 +1,534 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - applied some minor modifications in programming style/structure +# - changed name of class Control to ControlFile for more +# self-explanation naming +# - outsource of class ControlFile +# - initialisation of class attributes ( to avoid high number of +# conditional statements and set default values ) +# - divided assignment of attributes and the check of conditions +# - outsourced the commandline argument assignments to control attributes +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# The CONTROL file is the steering part of the FLEXPART extraction +# software. All necessary parameters needed to retrieve the data fields +# from the MARS archive for driving FLEXPART are set in a CONTROL file. +# Some specific parameters like the start and end dates can be overwritten +# by the command line parameters, but in generel all parameters needed +# for a complete set of fields for FLEXPART can be set in the CONTROL file. +# +# @Class Content: +# - __init__ +# - __read_controlfile__ +# - __str__ +# - assign_args_to_control +# - assign_envs_to_control +# - check_conditions +# - check_install_conditions +# - to_list +# +# @Class Attributes: +# +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import re +import sys +import inspect + +import _config + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class ControlFile(object): + ''' + Class containing the information of the flex_extract CONTROL file. + + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, + BASETIME, DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + ''' + + def __init__(self, filename): + ''' + @Description: + Initialises the instance of ControlFile class and defines + all class attributes with default values. 
Afterwards calls + function __read_controlfile__ to read parameter from + Control file. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + filename: string + Name of CONTROL file. + + @Return: + <nothing> + ''' + + # list of all possible class attributes and their default values + self.controlfile = filename + self.start_date = None + self.end_date = None + self.date_chunk = 3 + self.dtime = None + self.basetime = None + self.maxstep = None + self.type = None + self.time = None + self.step = None + self.marsclass = None + self.stream = None + self.number = 'OFF' + self.expver = None + self.grid = None + self.area = '' + self.left = None + self.lower = None + self.upper = None + self.right = None + self.level = None + self.levelist = None + self.resol = None + self.gauss = 0 + self.accuracy = 24 + self.omega = 0 + self.omegadiff = 0 + self.eta = 0 + self.etadiff = 0 + self.etapar = 77 + self.dpdeta = 1 + self.smooth = 0 + self.format = 'GRIB1' + self.addpar = None + self.prefix = 'EN' + self.cwc = 0 + self.wrf = 0 + self.ecfsdir = 'ectmp:/${USER}/econdemand/' + self.mailfail = ['${USER}'] + self.mailops = ['${USER}'] + self.grib2flexpart = 0 + self.ecstorage = 0 + self.ectrans = 0 + self.inputdir = '../work' + self.outputdir = self.inputdir + self.ecmwfdatadir = None + self.exedir = None + self.flexpart_root_scripts = None + self.makefile = None + self.destination = None + self.gateway = None + self.ecuid = None + self.ecgid = None + self.install_target = None + self.debug = 0 + self.request = 0 + + self.__read_controlfile__() + + return + + def __read_controlfile__(self): + ''' + @Description: + Read CONTROL file and assign all CONTROL file variables. + + @Input: + self: instance of ControlFile class + Description see class documentation. 
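+
+            Example (hypothetical CONTROL line, for illustration only):
+                a line such as "DAY1 20160809" is stored as the string
+                attribute "start_date", since DAY1, DAY2 and CLASS are
+                renamed to start_date, end_date and marsclass on reading.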
+ + @Return: + <nothing> + ''' + from tools import my_error + + # read whole CONTROL file + with open(self.controlfile) as f: + fdata = f.read().split('\n') + + # go through every line and store parameter + for ldata in fdata: + data = ldata.split() + if len(data) > 1: + if 'm_' in data[0].lower(): + data[0] = data[0][2:] + if data[0].lower() == 'class': + data[0] = 'marsclass' + if data[0].lower() == 'day1': + data[0] = 'start_date' + if data[0].lower() == 'day2': + data[0] = 'end_date' + if data[0].lower() == 'addpar': + if '/' in data[1]: + # remove leading '/' sign from addpar content + if data[1][0] == '/': + data[1] = data[1][1:] + dd = data[1].split('/') + data = [data[0]] + for d in dd: + data.append(d) + if len(data) == 2: + if '$' in data[1]: + setattr(self, data[0].lower(), data[1]) + while '$' in data[1]: + i = data[1].index('$') + j = data[1].find('{') + k = data[1].find('}') + var = os.getenv(data[1][j+1:k]) + if var is not None: + data[1] = data[1][:i] + var + data[1][k+1:] + else: + my_error(self.mailfail, + 'Could not find variable ' + + data[1][j+1:k] + ' while reading ' + + self.controlfile) + setattr(self, data[0].lower() + '_expanded', data[1]) + else: + if data[1].lower() != 'none': + setattr(self, data[0].lower(), data[1]) + else: + setattr(self, data[0].lower(), None) + elif len(data) > 2: + setattr(self, data[0].lower(), (data[1:])) + else: + pass + + # script directory + self.ecmwfdatadir = os.path.dirname(os.path.abspath(inspect.getfile( + inspect.currentframe()))) + '/../' + + # Fortran source directory + self.exedir = self.ecmwfdatadir + 'src/' + + return + + def __str__(self): + ''' + @Description: + Prepares a string which have all the ControlFile + class attributes with its associated values. + Each attribute is printed in one line and in + alphabetical order. + + Example: + 'age': 10 + 'color': 'Spotted' + 'kids': 0 + 'legs': 2 + 'name': 'Dog' + 'smell': 'Alot' + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + string of ControlFile class attributes with their values + ''' + import collections + + attrs = vars(self) + attrs = collections.OrderedDict(sorted(attrs.items())) + + return '\n'.join("%s: %s" % item for item in attrs.items()) + + def assign_args_to_control(self, args): + ''' + @Description: + Overwrites the existing ControlFile instance attributes with + the command line arguments. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + + @Return: + <nothing> + ''' + + # get dictionary of command line parameters and eliminate all + # parameters which are None (were not specified) + args_dict = vars(args) + arguments = {k : args_dict[k] for k in args_dict + if args_dict[k] != None} + + # assign all passed command line arguments to ControlFile instance + for k, v in arguments.iteritems(): + setattr(self, str(k), v) + + return + + def assign_envs_to_control(self, envs): + ''' + @Description: + Assigns the ECMWF environment parameter. + + @Input: + envs: dict of strings + Contains the ECMWF environment parameternames "ECUID", "ECGID", + "DESTINATION" and "GATEWAY" with its corresponding values. + They were read from the file "ECMWF_ENV". 
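+
+            Example (illustrative values only):
+                envs = {'ECUID': 'uid', 'ECGID': 'gid',
+                        'GATEWAY': 'gateway.example.org',
+                        'DESTINATION': 'user@genericSftp'}
+                sets self.ecuid, self.ecgid, self.gateway and
+                self.destination to these strings (keys are lowercased).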
+ + @Return: + <nothing> + ''' + + for k, v in envs.iteritems(): + setattr(self, str(k).lower(), str(v)) + + return + + def check_conditions(self): + ''' + @Description: + Checks a couple of necessary attributes and conditions, + such as if they exist and contain values. + Otherwise set default values. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + <nothing> + ''' + from tools import my_error + import numpy as np + + # check for having at least a starting date + # otherwise program is not allowed to run + if self.start_date is None: + print 'start_date specified neither in command line nor ' + \ + 'in CONTROL file ' + self.controlfile + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + # retrieve just one day if end_date isn't set + if self.end_date is None: + self.end_date = self.start_date + + # assure consistency of levelist and level + if self.levelist is None: + if self.level is None: + print 'Warning: neither levelist nor level ' + \ + 'specified in CONTROL file' + sys.exit(1) + else: + self.levelist = '1/to/' + self.level + else: + if 'to' in self.levelist.lower(): + self.level = self.levelist.split('/')[2] + else: + self.level = self.levelist.split('/')[-1] + + # if area was provided at command line + # decompse area into its 4 components + if self.area: + afloat = '.' in self.area + l = self.area.split('/') + if afloat: + for i, item in enumerate(l): + item = str(int(float(item) * 1000)) + self.upper, self.left, self.lower, self.right = l + + # prepare step for correct usage + if '/' in self.step: + l = self.step.split('/') + if 'to' in self.step.lower(): + if 'by' in self.step.lower(): + ilist = np.arange(int(l[0]), int(l[2]) + 1, int(l[4])) + self.step = ['{:0>3}'.format(i) for i in ilist] + else: + my_error(self.mailfail, self.step + ':\n' + + 'if "to" is used, please use "by" as well') + else: + self.step = l + + # if maxstep wasn't provided + # search for it in the "step" parameter + if self.maxstep is None: + self.maxstep = 0 + for s in self.step: + if int(s) > self.maxstep: + self.maxstep = int(s) + else: + self.maxstep = int(self.maxstep) + + # set root scripts since it is needed later on + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = self.ecmwfdatadir + + if not isinstance(self.mailfail, list): + if ',' in self.mailfail: + self.mailfail = self.mailfail.split(',') + elif ' ' in self.mailfail: + self.mailfail = self.mailfail.split() + else: + self.mailfail = [self.mailfail] + + if not isinstance(self.mailops, list): + if ',' in self.mailops: + self.mailops = self.mailops.split(',') + elif ' ' in self.mailops: + self.mailops = self.mailops.split() + else: + self.mailops = [self.mailops] + + if not self.gateway or not self.destination or \ + not self.ecuid or not self.ecgid: + print '\nEnvironment variables GATWAY, DESTINATION, ECUID and ' + \ + 'ECGID were not set properly!' + print 'Please check for excistence of file "ECMWF_ENV" in the ' + \ + 'python directory!' 
+ sys.exit(1) + + if self.request != 0: + marsfile = os.path.join(_config.PATH_RUN_DIR + os.path.sep + + _config.FILE_MARS_REQUESTS) + if os.path.isfile(marsfile): + os.remove(marsfile) + + # check logical variables for data type + # if its a string change to integer + logicals = ['gauss', 'omega', 'omegadiff', 'eta', 'etadiff', + 'dpdeta', 'cwc', 'wrf', 'grib2flexpart', 'ecstorage', + 'ectrans', 'debug', 'request'] + + for var in logicals: + if not isinstance(getattr(self, var), int): + setattr(self, var, int(getattr(self, var))) + + return + + def check_install_conditions(self): + ''' + @Description: + Checks a couple of necessary attributes and conditions + for the installation such as if they exist and contain values. + Otherwise set default values. + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + <nothing> + ''' + + if self.install_target and \ + self.install_target not in ['local', 'ecgate', 'cca']: + print('ERROR: unknown or missing installation target ') + print('target: ', self.install_target) + print('please specify correct installation target ' + + '(local | ecgate | cca)') + print('use -h or --help for help') + sys.exit(1) + + if self.install_target and self.install_target != 'local': + if not self.ecgid or not self.ecuid or \ + not self.gateway or not self.destination: + print('Please enter your ECMWF user id and group id as well ' + + 'as the \nname of the local gateway and the ectrans ' + + 'destination ') + print('with command line options --ecuid --ecgid \ + --gateway --destination') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + print('Please consult ecaccess documentation or ECMWF user \ + support for further details') + sys.exit(1) + + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = '${HOME}' + else: + self.flexpart_root_scripts = self.flexpart_root_scripts + else: # local + if not self.flexpart_root_scripts: + self.flexpart_root_scripts = '../' + + if not self.makefile: + self.makefile = 'Makefile.gfortran' + + return + + def to_list(self): + ''' + @Description: + Just generates a list of strings containing the attributes and + assigned values except the attributes "_expanded", "exedir", + "ecmwfdatadir" and "flexpart_root_scripts". + + @Input: + self: instance of ControlFile class + Description see class documentation. + + @Return: + l: list + A sorted list of the all ControlFile class attributes with + their values except the attributes "_expanded", "exedir", + "ecmwfdatadir" and "flexpart_root_scripts". 
+ ''' + + import collections + + attrs = collections.OrderedDict(sorted(vars(self).items())) + + l = list() + + for item in attrs.items(): + if '_expanded' in item[0]: + pass + elif 'exedir' in item[0]: + pass + elif 'flexpart_root_scripts' in item[0]: + pass + elif 'ecmwfdatadir' in item[0]: + pass + else: + if isinstance(item[1], list): + stot = '' + for s in item[1]: + stot += s + ' ' + + l.append("%s %s" % (item[0], stot)) + else: + l.append("%s %s" % item) + + return sorted(l) + diff --git a/python/pythontest/TestInstallTar/test_untar/python/ECMWF_ENV b/python/pythontest/TestInstallTar/test_untar/python/ECMWF_ENV new file mode 100644 index 0000000000000000000000000000000000000000..5af57721445813f4a44c0b9562b8219196daeda9 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/ECMWF_ENV @@ -0,0 +1,4 @@ +ECUID km4a +ECGID at +GATEWAY srvx8.img.univie.ac.at +DESTINATION annep@genericSftp diff --git a/python/pythontest/TestInstallTar/test_untar/python/EcFlexpart.py b/python/pythontest/TestInstallTar/test_untar/python/EcFlexpart.py new file mode 100644 index 0000000000000000000000000000000000000000..57b2da630d336ba16c58042ab2c67a64b0c28e84 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/EcFlexpart.py @@ -0,0 +1,1314 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - extended with class Control +# - removed functions mkdir_p, daterange, years_between, months_between +# - added functions darain, dapoly, to_param_id, init128, normal_exit, +# my_error, clean_up, install_args_and_control, +# interpret_args_and_control, +# - removed function __del__ in class EIFLexpart +# - added the following functions in EIFlexpart: +# - create_namelist +# - process_output +# - deacc_fluxes +# - modified existing EIFlexpart - functions for the use in +# flex_extract +# - retrieve also longer term forecasts, not only analyses and +# short term forecast data +# - added conversion into GRIB2 +# - added conversion into .fp format for faster execution of FLEXPART +# (see https://www.flexpart.eu/wiki/FpCtbtoWo4FpFormat) +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - removed function getFlexpartTime in class EcFlexpart +# - outsourced class ControlFile +# - outsourced class MarsRetrieval +# - changed class name from EIFlexpart to EcFlexpart +# - applied minor code changes (style) +# - removed "dead code" , e.g. retrieval of Q since it is not needed +# - removed "times" parameter from retrieve-method since it is not used +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# FLEXPART needs grib files in a specifc format. All necessary data fields +# for one time step are stored in a single file. The class represents an +# instance with all the parameter and settings necessary for retrieving +# MARS data and modifing them so they are fitting FLEXPART need. The class +# is able to disaggregate the fluxes and convert grid types to the one needed +# by FLEXPART, therefore using the FORTRAN program. 
+# +# @Class Content: +# - __init__ +# - write_namelist +# - retrieve +# - process_output +# - create +# - deacc_fluxes +# +# @Class Attributes: +# - dtime +# - basetime +# - server +# - marsclass +# - stream +# - resol +# - accuracy +# - number +# - expver +# - glevelist +# - area +# - grid +# - level +# - levelist +# - types +# - dates +# - area +# - gaussian +# - params +# - inputdir +# - outputfilelist +# +#******************************************************************************* +#pylint: disable=unsupported-assignment-operation +# this is disabled because its an error in pylint for this specific case +#pylint: disable=consider-using-enumerate +# this is not useful in this case +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import subprocess +import shutil +import os +import glob +from datetime import datetime, timedelta +import numpy as np +from gribapi import grib_set, grib_index_select, grib_new_from_index, grib_get,\ + grib_write, grib_get_values, grib_set_values, grib_release,\ + grib_index_release, grib_index_get + +# software specific classes and modules from flex_extract +import _config +from GribTools import GribTools +from tools import init128, to_param_id, silent_remove, product, my_error +from MarsRetrieval import MarsRetrieval +import disaggregation + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class EcFlexpart(object): + ''' + Class to retrieve FLEXPART specific ECMWF data. + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, c, fluxes=False): + ''' + @Description: + Creates an object/instance of EcFlexpart with the + associated settings of its attributes for the retrieval. + + @Input: + self: instance of EcFlexpart + The current object of the class. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + fluxes: boolean, optional + Decides if the flux parameter settings are stored or + the rest of the parameter list. + Default value is False. + + @Return: + <nothing> + ''' + + # different mars types for retrieving data for flexpart + self.types = dict() + + if c.maxstep > len(c.type): # Pure forecast mode + c.type = [c.type[1]] + c.step = ['{:0>3}'.format(int(c.step[0]))] + c.time = [c.time[0]] + for i in range(1, c.maxstep + 1): + c.type.append(c.type[0]) + c.step.append('{:0>3}'.format(i)) + c.time.append(c.time[0]) + + self.inputdir = c.inputdir + self.basetime = c.basetime + self.dtime = c.dtime + i = 0 + if fluxes and c.maxstep <= 24: + # no forecast beyond one day is needed! 
+ # Thus, prepare flux data manually as usual + # with only forecast fields with start times at 00/12 + # (but without 00/12 fields since these are + # the initialisation times of the flux fields + # and therefore are zero all the time) + self.types[c.type[1]] = {'times': '00/12', 'steps': + '{}/to/12/by/{}'.format(c.dtime, c.dtime)} + else: + for ty, st, ti in zip(c.type, c.step, c.time): + btlist = range(24) + if c.basetime == '12': + btlist = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12] + if c.basetime == '00': + btlist = [13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 0] + + if i % int(c.dtime) == 0 and (i in btlist or c.maxstep > 24): + + if ty not in self.types.keys(): + self.types[ty] = {'times': '', 'steps': ''} + + if ti not in self.types[ty]['times']: + if self.types[ty]['times']: + self.types[ty]['times'] += '/' + self.types[ty]['times'] += ti + + if st not in self.types[ty]['steps']: + if self.types[ty]['steps']: + self.types[ty]['steps'] += '/' + self.types[ty]['steps'] += st + i += 1 + + self.marsclass = c.marsclass + self.stream = c.stream + self.number = c.number + self.resol = c.resol + self.accuracy = c.accuracy + self.level = c.level + + if c.levelist: + self.levelist = c.levelist + else: + self.levelist = '1/to/' + c.level + + # for gaussian grid retrieval + self.glevelist = '1/to/' + c.level + + if hasattr(c, 'gaussian') and c.gaussian: + self.gaussian = c.gaussian + else: + self.gaussian = '' + + if hasattr(c, 'expver') and c.expver: + self.expver = c.expver + else: + self.expver = '1' + + if hasattr(c, 'number') and c.number: + self.number = c.number + else: + self.number = '0' + + if 'N' in c.grid: # Gaussian output grid + self.grid = c.grid + self.area = 'G' + else: + self.grid = '{}/{}'.format(int(c.grid) / 1000., int(c.grid) / 1000.) + self.area = '{}/{}/{}/{}'.format(int(c.upper) / 1000., + int(c.left) / 1000., + int(c.lower) / 1000., + int(c.right) / 1000.) 
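+
+        # Added note: coordinates and grid spacing come from the CONTROL
+        # file as integers in millidegrees (see check_conditions in
+        # ControlFile.py), hence the division by 1000 above; e.g. grid=1000
+        # together with upper=40000 describes a 1.0 x 1.0 degree grid whose
+        # northern boundary lies at 40.0 degrees latitude.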
+ + self.outputfilelist = [] + + + # Now comes the nasty part that deals with the different + # scenarios we have: + # 1) Calculation of etadot on + # a) Gaussian grid + # b) Output grid + # c) Output grid using parameter 77 retrieved from MARS + # 3) Calculation/Retrieval of omega + # 4) Download also data for WRF + + + # Different grids need different retrievals + # SH = Spherical Harmonics, GG = Gaussian Grid, + # OG = Output Grid, ML = MultiLevel, SL = SingleLevel + self.params = {'SH__ML': '', 'SH__SL': '', + 'GG__ML': '', 'GG__SL': '', + 'OG__ML': '', 'OG__SL': '', + 'OG_OROLSM_SL': '', 'OG_acc_SL': ''} + + if fluxes is False: + self.params['SH__SL'] = ['LNSP', 'ML', '1', 'OFF'] + # "SD/MSL/TCC/10U/10V/2T/2D/129/172" + self.params['OG__SL'] = ["141/151/164/165/166/167/168/129/172", \ + 'SFC', '1', self.grid] + if c.addpar: + if c.addpar[0] == '/': + c.addpar = c.addpar[1:] + self.params['OG__SL'][0] += '/' + '/'.join(c.addpar) + + self.params['OG_OROLSM__SL'] = ["160/27/28/173", \ + 'SFC', '1', self.grid] + + self.params['OG__ML'] = ['T/Q', 'ML', self.levelist, self.grid] + + if c.gauss == '0' and c.eta == '1': + # the simplest case + self.params['OG__ML'][0] += '/U/V/77' + elif c.gauss == '0' and c.eta == '0': + # this is not recommended (inaccurate) + self.params['OG__ML'][0] += '/U/V' + elif c.gauss == '1' and c.eta == '0': + # this is needed for data before 2008, or for reanalysis data + self.params['GG__SL'] = ['Q', 'ML', '1', \ + '{}'.format((int(self.resol) + 1) / 2)] + self.params['SH__ML'] = ['U/V/D', 'ML', self.glevelist, 'OFF'] + else: + print('Warning: This is a very costly parameter combination, ' + 'use only for debugging!') + self.params['GG__SL'] = ['Q', 'ML', '1', \ + '{}'.format((int(self.resol) + 1) / 2)] + self.params['GG__ML'] = ['U/V/D/77', 'ML', self.glevelist, \ + '{}'.format((int(self.resol) + 1) / 2)] + + if hasattr(c, 'omega') and c.omega == '1': + self.params['OG__ML'][0] += '/W' + + # add cloud water content if necessary + if hasattr(c, 'cwc') and c.cwc == '1': + self.params['OG__ML'][0] += '/CLWC/CIWC' + + # add vorticity and geopotential height for WRF if necessary + if hasattr(c, 'wrf') and c.wrf == '1': + self.params['OG__ML'][0] += '/Z/VO' + if '/D' not in self.params['OG__ML'][0]: + self.params['OG__ML'][0] += '/D' + #wrf_sfc = 'sp/msl/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/stl1/ / + # stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4'.upper() + wrf_sfc = '134/235/167/165/166/168/129/172/34/31/141/ \ + 139/170/183/236/39/40/41/42'.upper() + lwrt_sfc = wrf_sfc.split('/') + for par in lwrt_sfc: + if par not in self.params['OG__SL'][0]: + self.params['OG__SL'][0] += '/' + par + + else: + self.params['OG_acc_SL'] = ["LSP/CP/SSHF/EWSS/NSSS/SSR", \ + 'SFC', '1', self.grid] + + # if needed, add additional WRF specific parameters here + + return + + + def write_namelist(self, c, filename): + ''' + @Description: + Creates a namelist file in the temporary directory and writes + the following values to it: maxl, maxb, mlevel, + mlevelist, mnauf, metapar, rlo0, rlo1, rla0, rla1, + momega, momegadiff, mgauss, msmooth, meta, metadiff, mdpdeta + + @Input: + self: instance of EcFlexpart + The current object of the class. 
+ + c: instance of class ControlFile + Contains all the parameters of CONTROL files, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + filename: string + Name of the namelist file. + + @Return: + <nothing> + ''' + + self.inputdir = c.inputdir + area = np.asarray(self.area.split('/')).astype(float) + grid = np.asarray(self.grid.split('/')).astype(float) + + if area[1] > area[3]: + area[1] -= 360 + maxl = int((area[3] - area[1]) / grid[1]) + 1 + maxb = int((area[0] - area[2]) / grid[0]) + 1 + + with open(self.inputdir + '/' + filename, 'w') as f: + f.write('&NAMGEN\n') + f.write(',\n '.join(['maxl = ' + str(maxl), 'maxb = ' + str(maxb), + 'mlevel = ' + str(self.level), + 'mlevelist = ' + '"' + str(self.levelist) + + '"', + 'mnauf = ' + str(self.resol), + 'metapar = ' + '77', + 'rlo0 = ' + str(area[1]), + 'rlo1 = ' + str(area[3]), + 'rla0 = ' + str(area[2]), + 'rla1 = ' + str(area[0]), + 'momega = ' + str(c.omega), + 'momegadiff = ' + str(c.omegadiff), + 'mgauss = ' + str(c.gauss), + 'msmooth = ' + str(c.smooth), + 'meta = ' + str(c.eta), + 'metadiff = ' + str(c.etadiff), + 'mdpdeta = ' + str(c.dpdeta)])) + + f.write('\n/\n') + + return + + def retrieve(self, server, dates, request, inputdir='.'): + ''' + @Description: + Finalizing the retrieval information by setting final details + depending on grid type. + Prepares MARS retrievals per grid type and submits them. + + @Input: + self: instance of EcFlexpart + The current object of the class. + + server: instance of ECMWFService or ECMWFDataServer + The connection to the ECMWF server. This is different + for member state users which have full access and non + member state users which have only access to the public + data sets. The decision is made from command line argument + "public"; for public access its True (ECMWFDataServer) + for member state users its False (ECMWFService) + + dates: string + Contains start and end date of the retrieval in the format + "YYYYMMDD/to/YYYYMMDD" + + inputdir: string, optional + Path to the directory where the retrieved data is about + to be stored. The default is the current directory ('.'). + + @Return: + <nothing> + ''' + self.dates = dates + self.server = server + self.inputdir = inputdir + oro = False + + for ftype in self.types: + for pk, pv in self.params.iteritems(): + if isinstance(pv, str): + continue + mftype = '' + ftype + mftime = self.types[ftype]['times'] + mfstep = self.types[ftype]['steps'] + mfdate = self.dates + mfstream = self.stream + mftarget = self.inputdir + "/" + ftype + pk + '.' + \ + self.dates.split('/')[0] + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + if pk == 'OG__SL': + pass + if pk == 'OG_OROLSM__SL': + if oro is False: + mfstream = 'OPER' + mftype = 'AN' + mftime = '00' + mfstep = '000' + mfdate = self.dates.split('/')[0] + mftarget = self.inputdir + "/" + pk + '.' + mfdate + \ + '.' + str(os.getppid()) + '.' 
+ \ + str(os.getpid()) + ".grb" + oro = True + else: + continue + if pk == 'GG__SL' and pv[0] == 'Q': + area = "" + gaussian = 'reduced' + else: + area = self.area + gaussian = self.gaussian + + # ------ on demand path -------------------------------------------------- + if self.basetime is None: + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=mfstream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + if request == 0: + MR.display_info() + MR.data_retrieve() + elif request == 1: + MR.print_info() + elif request == 2: + MR.print_info() + MR.display_info() + MR.data_retrieve() + else: + print 'Failure' + # ------ operational path ------------------------------------------------ + else: + # check if mars job requests fields beyond basetime. + # If yes eliminate those fields since they may not + # be accessible with user's credentials + if 'by' in mfstep: + sm1 = 2 + else: + sm1 = -1 + + if 'by' in mftime: + tm1 = 2 + else: + tm1 = -1 + + maxdate = datetime.strptime(mfdate.split('/')[-1] + + mftime.split('/')[tm1], + '%Y%m%d%H') + istep = int(mfstep.split('/')[sm1]) + maxtime = maxdate + timedelta(hours=istep) + + elimit = datetime.strptime(mfdate.split('/')[-1] + + self.basetime, '%Y%m%d%H') + + if self.basetime == '12': + # -------------- flux data ---------------------------- + if 'acc' in pk: + + # Strategy: + # if maxtime-elimit >= 24h reduce date by 1, + # if 12h <= maxtime-elimit<12h reduce time for last date + # if maxtime-elimit<12h reduce step for last time + # A split of the MARS job into 2 is likely necessary. + maxtime = elimit - timedelta(hours=24) + mfdate = '/'.join(['/'.join(mfdate.split('/')[:-1]), + datetime.strftime(maxtime, + '%Y%m%d')]) + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + maxtime = elimit - timedelta(hours=12) + mfdate = datetime.strftime(maxtime, '%Y%m%d') + mftime = '00' + mftarget = self.inputdir + "/" + ftype + pk + \ + '.' + mfdate + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + + MR.data_retrieve() + # -------------- non flux data ------------------------ + else: + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + else: # basetime == 0 ??? 
#AP + + maxtime = elimit - timedelta(hours=24) + mfdate = datetime.strftime(maxtime, '%Y%m%d') + mftimesave = ''.join(mftime) + + if '/' in mftime: + times = mftime.split('/') + while ((int(times[0]) + + int(mfstep.split('/')[0]) <= 12) and + (pk != 'OG_OROLSM__SL') and 'acc' not in pk): + times = times[1:] + if len(times) > 1: + mftime = '/'.join(times) + else: + mftime = times[0] + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + if (int(mftimesave.split('/')[0]) == 0 and + int(mfstep.split('/')[0]) == 0 and + pk != 'OG_OROLSM__SL'): + + mfdate = datetime.strftime(elimit, '%Y%m%d') + mftime = '00' + mfstep = '000' + mftarget = self.inputdir + "/" + ftype + pk + \ + '.' + mfdate + '.' + str(os.getppid()) +\ + '.' + str(os.getpid()) + ".grb" + + MR = MarsRetrieval(self.server, + marsclass=self.marsclass, + stream=self.stream, + type=mftype, + levtype=pv[1], + levelist=pv[2], + resol=self.resol, + gaussian=gaussian, + accuracy=self.accuracy, + grid=pv[3], + target=mftarget, + area=area, + date=mfdate, + time=mftime, + number=self.number, + step=mfstep, + expver=self.expver, + param=pv[0]) + + MR.display_info() + MR.data_retrieve() + + if request == 0 or request == 2: + print('MARS retrieve done ... ') + elif request == 1: + print('MARS request printed ...') + + return + + + def process_output(self, c): + ''' + @Description: + The grib files are postprocessed depending on the selection in + CONTROL file. The resulting files are moved to the output + directory if its not equla to the input directory. + The following modifications might be done if + properly switched in CONTROL file: + GRIB2 - Conversion to GRIB2 + ECTRANS - Transfer of files to gateway server + ECSTORAGE - Storage at ECMWF server + GRIB2FLEXPART - Conversion of GRIB files to FLEXPART binary format + + @Input: + self: instance of EcFlexpart + The current object of the class. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + + ''' + + print '\n\nPostprocessing:\n Format: {}\n'.format(c.format) + + if c.ecapi is False: + print('ecstorage: {}\n ecfsdir: {}\n'. 
+ format(c.ecstorage, c.ecfsdir)) + if not hasattr(c, 'gateway'): + c.gateway = os.getenv('GATEWAY') + if not hasattr(c, 'destination'): + c.destination = os.getenv('DESTINATION') + print('ectrans: {}\n gateway: {}\n destination: {}\n ' + .format(c.ectrans, c.gateway, c.destination)) + + print 'Output filelist: \n' + print self.outputfilelist + + if c.format.lower() == 'grib2': + for ofile in self.outputfilelist: + p = subprocess.check_call(['grib_set', '-s', 'edition=2, \ + productDefinitionTemplateNumber=8', + ofile, ofile + '_2']) + p = subprocess.check_call(['mv', ofile + '_2', ofile]) + + if int(c.ectrans) == 1 and c.ecapi is False: + for ofile in self.outputfilelist: + p = subprocess.check_call(['ectrans', '-overwrite', '-gateway', + c.gateway, '-remote', c.destination, + '-source', ofile]) + #print('ectrans:', p) + + if int(c.ecstorage) == 1 and c.ecapi is False: + for ofile in self.outputfilelist: + p = subprocess.check_call(['ecp', '-o', ofile, + os.path.expandvars(c.ecfsdir)]) + + if c.outputdir != c.inputdir: + for ofile in self.outputfilelist: + p = subprocess.check_call(['mv', ofile, c.outputdir]) + + # prepare environment for the grib2flexpart run + # to convert grib to flexpart binary + if c.grib2flexpart == '1': + + # generate AVAILABLE file + # Example of AVAILABLE file data: + # 20131107 000000 EN13110700 ON DISC + clist = [] + for ofile in self.outputfilelist: + fname = ofile.split('/') + if '.' in fname[-1]: + l = fname[-1].split('.') + timestamp = datetime.strptime(l[0][-6:] + l[1], + '%y%m%d%H') + timestamp += timedelta(hours=int(l[2])) + cdate = datetime.strftime(timestamp, '%Y%m%d') + chms = datetime.strftime(timestamp, '%H%M%S') + else: + cdate = '20' + fname[-1][-8:-2] + chms = fname[-1][-2:] + '0000' + clist.append(cdate + ' ' + chms + ' '*6 + + fname[-1] + ' '*14 + 'ON DISC') + clist.sort() + with open(c.outputdir + '/' + 'AVAILABLE', 'w') as f: + f.write('\n'.join(clist) + '\n') + + # generate pathnames file + pwd = os.path.abspath(c.outputdir) + with open(pwd + '/pathnames', 'w') as f: + f.write(pwd + '/Options/\n') + f.write(pwd + '/\n') + f.write(pwd + '/\n') + f.write(pwd + '/AVAILABLE\n') + f.write(' = == = == = == = == = == == = \n') + + # create Options dir if necessary + if not os.path.exists(pwd + '/Options'): + os.makedirs(pwd+'/Options') + + # read template COMMAND file + with open(os.path.expandvars(os.path.expanduser( + c.flexpart_root_scripts)) + '/../Options/COMMAND', 'r') as f: + lflist = f.read().split('\n') + + # find index of list where to put in the + # date and time information + # usually after the LDIRECT parameter + i = 0 + for l in lflist: + if 'LDIRECT' in l.upper(): + break + i += 1 + + # insert the date and time information of run start and end + # into the list of lines of COMMAND file + lflist = lflist[:i+1] + \ + [clist[0][:16], clist[-1][:16]] + \ + lflist[i+3:] + + # write the new COMMAND file + with open(pwd + '/Options/COMMAND', 'w') as g: + g.write('\n'.join(lflist) + '\n') + + # change to outputdir and start the grib2flexpart run + # afterwards switch back to the working dir + os.chdir(c.outputdir) + p = subprocess.check_call([ + os.path.expandvars(os.path.expanduser(c.flexpart_root_scripts)) + + '/../FLEXPART_PROGRAM/grib2flexpart', 'useAvailable', '.']) + os.chdir(pwd) + + return + + def create(self, inputfiles, c): + ''' + @Description: + This method is based on the ECMWF example index.py + https://software.ecmwf.int/wiki/display/GRIB/index.py + + An index file will be created which depends on the combination + of "date", 
"time" and "stepRange" values. This is used to iterate + over all messages in each grib file which were passed through the + parameter "inputfiles" to seperate specific parameters into fort.* + files. Afterwards the FORTRAN program Convert2 is called to convert + the data fields all to the same grid and put them in one file + per unique time step (combination of "date", "time" and + "stepRange"). + + @Input: + self: instance of EcFlexpart + The current object of the class. + + inputfiles: instance of UioFiles + Contains a list of files. + + c: instance of class ControlFile + Contains all the parameters of CONTROL files, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + table128 = init128(_config.PATH_GRIBTABLE) + wrfpars = to_param_id('sp/mslp/skt/2t/10u/10v/2d/z/lsm/sst/ci/sd/\ + stl1/stl2/stl3/stl4/swvl1/swvl2/swvl3/swvl4', + table128) + + index_keys = ["date", "time", "step"] + indexfile = c.inputdir + "/date_time_stepRange.idx" + silent_remove(indexfile) + grib = GribTools(inputfiles.files) + # creates new index file + iid = grib.index(index_keys=index_keys, index_file=indexfile) + + # read values of index keys + index_vals = [] + for key in index_keys: + index_vals.append(grib_index_get(iid, key)) + print index_vals[-1] + # index_vals looks for example like: + # index_vals[0]: ('20171106', '20171107', '20171108') ; date + # index_vals[1]: ('0', '1200', '1800', '600') ; time + # index_vals[2]: ('0', '12', '3', '6', '9') ; stepRange + + fdict = {'10':None, '11':None, '12':None, '13':None, '16':None, + '17':None, '19':None, '21':None, '22':None, '20':None} + + for prod in product(*index_vals): + # flag for Fortran program CONVERT2 and file merging + convertFlag = False + print 'current prod: ', prod + # e.g. prod = ('20170505', '0', '12') + # ( date ,time, step) + # per date e.g. time = 0, 600, 1200, 1800 + # per time e.g. step = 0, 3, 6, 9, 12 + for i in range(len(index_keys)): + grib_index_select(iid, index_keys[i], prod[i]) + + # get first id from current product + gid = grib_new_from_index(iid) + + # if there is data for this product combination + # prepare some date and time parameter before reading the data + if gid is not None: + # Combine all temporary data files into final grib file if + # gid is at least one time not None. Therefore set convertFlag + # to save information. The fortran program CONVERT2 is also + # only done if convertFlag is True + convertFlag = True + # remove old fort.* files and open new ones + # they are just valid for a single product + for k, f in fdict.iteritems(): + silent_remove(c.inputdir + "/fort." + k) + fdict[k] = open(c.inputdir + '/fort.' 
+ k, 'w') + + cdate = str(grib_get(gid, 'date')) + time = grib_get(gid, 'time') + step = grib_get(gid, 'step') + # create correct timestamp from the three time informations + # date, time, step + timestamp = datetime.strptime(cdate + '{:0>2}'.format(time/100), + '%Y%m%d%H') + timestamp += timedelta(hours=int(step)) + cdateH = datetime.strftime(timestamp, '%Y%m%d%H') + + if c.basetime is not None: + slimit = datetime.strptime(c.start_date + '00', '%Y%m%d%H') + bt = '23' + if c.basetime == '00': + bt = '00' + slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\ + - timedelta(hours=12-int(c.dtime)) + if c.basetime == '12': + bt = '12' + slimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H')\ + - timedelta(hours=12-int(c.dtime)) + + elimit = datetime.strptime(c.end_date + bt, '%Y%m%d%H') + + if timestamp < slimit or timestamp > elimit: + continue + + try: + if c.wrf == '1': + if 'olddate' not in locals(): + fwrf = open(c.outputdir + '/WRF' + cdate + + '.{:0>2}'.format(time) + '.000.grb2', 'w') + olddate = cdate[:] + else: + if cdate != olddate: + fwrf = open(c.outputdir + '/WRF' + cdate + + '.{:0>2}'.format(time) + '.000.grb2', + 'w') + olddate = cdate[:] + except AttributeError: + pass + + # helper variable to remember which fields are already used. + savedfields = [] + while 1: + if gid is None: + break + paramId = grib_get(gid, 'paramId') + gridtype = grib_get(gid, 'gridType') + levtype = grib_get(gid, 'typeOfLevel') + if paramId == 133 and gridtype == 'reduced_gg': + # Specific humidity (Q.grb) is used as a template only + # so we need the first we "meet" + with open(c.inputdir + '/fort.18', 'w') as fout: + grib_write(gid, fout) + elif paramId == 131 or paramId == 132: + grib_write(gid, fdict['10']) + elif paramId == 130: + grib_write(gid, fdict['11']) + elif paramId == 133 and gridtype != 'reduced_gg': + grib_write(gid, fdict['17']) + elif paramId == 152: + grib_write(gid, fdict['12']) + elif paramId == 155 and gridtype == 'sh': + grib_write(gid, fdict['13']) + elif paramId in [129, 138, 155] and levtype == 'hybrid' \ + and c.wrf == '1': + pass + elif paramId == 246 or paramId == 247: + # cloud liquid water and ice + if paramId == 246: + clwc = grib_get_values(gid) + else: + clwc += grib_get_values(gid) + grib_set_values(gid, clwc) + grib_set(gid, 'paramId', 201031) + grib_write(gid, fdict['22']) + elif paramId == 135: + grib_write(gid, fdict['19']) + elif paramId == 77: + grib_write(gid, fdict['21']) + else: + if paramId not in savedfields: + grib_write(gid, fdict['16']) + savedfields.append(paramId) + else: + print 'duplicate ' + str(paramId) + ' not written' + + try: + if c.wrf == '1': + if levtype == 'hybrid': # model layer + if paramId in [129, 130, 131, 132, 133, 138, 155]: + grib_write(gid, fwrf) + else: # sfc layer + if paramId in wrfpars: + grib_write(gid, fwrf) + except AttributeError: + pass + + grib_release(gid) + gid = grib_new_from_index(iid) + + for f in fdict.values(): + f.close() + + # call for CONVERT2 if flag is True + if convertFlag: + pwd = os.getcwd() + os.chdir(c.inputdir) + if os.stat('fort.21').st_size == 0 and int(c.eta) == 1: + print 'Parameter 77 (etadot) is missing, most likely it is \ + not available for this type or date/time\n' + print 'Check parameters CLASS, TYPE, STREAM, START_DATE\n' + my_error(c.mailfail, 'fort.21 is empty while parameter eta \ + is set to 1 in CONTROL file') + + # create the corresponding output file fort.15 + # (generated by CONVERT2) + fort.16 (paramId 167 and 168) + p = subprocess.check_call( + 
[os.path.expandvars(os.path.expanduser(c.exedir)) + + '/CONVERT2'], shell=True) + os.chdir(pwd) + + # create final output filename, e.g. EN13040500 (ENYYMMDDHH) + fnout = c.inputdir + '/' + c.prefix + if c.maxstep > 12: + suffix = cdate[2:8] + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step) + else: + suffix = cdateH[2:10] + fnout += suffix + print "outputfile = " + fnout + self.outputfilelist.append(fnout) # needed for final processing + + # create outputfile and copy all data from intermediate files + # to the outputfile (final GRIB files) + orolsm = os.path.basename(glob.glob( + c.inputdir + '/OG_OROLSM__SL.*.' + c.ppid + '*')[0]) + fluxfile = 'flux' + cdate[0:2] + suffix + if c.cwc != '1': + flist = ['fort.15', fluxfile, 'fort.16', orolsm] + else: + flist = ['fort.15', 'fort.22', fluxfile, 'fort.16', orolsm] + + with open(fnout, 'wb') as fout: + for f in flist: + shutil.copyfileobj( + open(c.inputdir + '/' + f, 'rb'), fout) + + if c.omega == '1': + with open(c.outputdir + '/OMEGA', 'wb') as fout: + shutil.copyfileobj( + open(c.inputdir + '/fort.25', 'rb'), fout) + + if hasattr(c, 'wrf') and c.wrf == '1': + fwrf.close() + + grib_index_release(iid) + + return + + def deacc_fluxes(self, inputfiles, c): + ''' + @Description: + Goes through all flux fields in ordered time and de-accumulate + the fields. Afterwards the fields are disaggregated in time. + Different versions of disaggregation is provided for rainfall + data (darain, modified linear) and the surface fluxes and + stress data (dapoly, cubic polynomial). + + @Input: + self: instance of EcFlexpart + The current object of the class. + + inputfiles: instance of UioFiles + Contains a list of files. + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + table128 = init128(_config.PATH_GRIBTABLE) + pars = to_param_id(self.params['OG_acc_SL'][0], table128) + index_keys = ["date", "time", "step"] + indexfile = c.inputdir + "/date_time_stepRange.idx" + silent_remove(indexfile) + grib = GribTools(inputfiles.files) + # creates new index file + iid = grib.index(index_keys=index_keys, index_file=indexfile) + + # read values of index keys + index_vals = [] + for key in index_keys: + key_vals = grib_index_get(iid, key) + print key_vals + # have to sort the steps for disaggregation, + # therefore convert to int first + if key == 'step': + key_vals = [int(k) for k in key_vals] + key_vals.sort() + key_vals = [str(k) for k in key_vals] + index_vals.append(key_vals) + # index_vals looks for example like: + # index_vals[0]: ('20171106', '20171107', '20171108') ; date + # index_vals[1]: ('0', '1200') ; time + # index_vals[2]: (3', '6', '9', '12') ; stepRange + + valsdict = {} + svalsdict = {} + stepsdict = {} + for p in pars: + valsdict[str(p)] = [] + svalsdict[str(p)] = [] + stepsdict[str(p)] = [] + + print 'maxstep: ', c.maxstep + + for prod in product(*index_vals): + # e.g. prod = ('20170505', '0', '12') + # ( date ,time, step) + # per date e.g. 
time = 0, 1200 + # per time e.g. step = 3, 6, 9, 12 + for i in range(len(index_keys)): + grib_index_select(iid, index_keys[i], prod[i]) + + gid = grib_new_from_index(iid) + if gid is not None: + cdate = grib_get(gid, 'date') + time = grib_get(gid, 'time') + step = grib_get(gid, 'step') + # date+time+step-2*dtime + # (since interpolated value valid for step-2*dtime) + sdate = datetime(year=cdate/10000, + month=(cdate % 10000)/100, + day=(cdate % 100), + hour=time/100) + fdate = sdate + timedelta(hours=step-2*int(c.dtime)) + sdates = sdate + timedelta(hours=step) + elimit = None + else: + break + + if c.maxstep > 12: + fnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step-2*int(c.dtime)) + gnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step-int(c.dtime)) + hnout = c.inputdir + '/flux' + \ + sdate.strftime('%Y%m%d') + '.{:0>2}'.format(time/100) + \ + '.{:0>3}'.format(step) + g = open(gnout, 'w') + h = open(hnout, 'w') + else: + fnout = c.inputdir + '/flux' + fdate.strftime('%Y%m%d%H') + gnout = c.inputdir + '/flux' + (fdate + + timedelta(hours=int(c.dtime)) + ).strftime('%Y%m%d%H') + hnout = c.inputdir + '/flux' + sdates.strftime('%Y%m%d%H') + g = open(gnout, 'w') + h = open(hnout, 'w') + + print "outputfile = " + fnout + f = open(fnout, 'w') + + # read message for message and store relevant data fields + # data keywords are stored in pars + while 1: + if gid is None: + break + cparamId = str(grib_get(gid, 'paramId')) + step = grib_get(gid, 'step') + atime = grib_get(gid, 'time') + ni = grib_get(gid, 'Ni') + nj = grib_get(gid, 'Nj') + if cparamId in valsdict.keys(): + values = grib_get_values(gid) + vdp = valsdict[cparamId] + svdp = svalsdict[cparamId] + sd = stepsdict[cparamId] + + if cparamId == '142' or cparamId == '143': + fak = 1. / 1000. + else: + fak = 3600. 
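+                    # Added note: "fak" rescales the accumulated fields
+                    # before deaccumulation; for precipitation (paramId
+                    # 142/143) the division by 1/1000 converts m to mm,
+                    # for the remaining fluxes the division by 3600 turns
+                    # the later per-hour differences into per-second rates.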
+ + values = (np.reshape(values, (nj, ni))).flatten() / fak + vdp.append(values[:]) # save the accumulated values + if step <= int(c.dtime): + svdp.append(values[:] / int(c.dtime)) + else: # deaccumulate values + svdp.append((vdp[-1] - vdp[-2]) / int(c.dtime)) + + print(cparamId, atime, step, len(values), + values[0], np.std(values)) + # save the 1/3-hourly or specific values + # svdp.append(values[:]) + sd.append(step) + # len(svdp) correspond to the time + if len(svdp) >= 3: + if len(svdp) > 3: + if cparamId == '142' or cparamId == '143': + values = disaggregation.darain(svdp) + else: + values = disaggregation.dapoly(svdp) + + if not (step == c.maxstep and c.maxstep > 12 \ + or sdates == elimit): + vdp.pop(0) + svdp.pop(0) + else: + if c.maxstep > 12: + values = svdp[1] + else: + values = svdp[0] + + grib_set_values(gid, values) + if c.maxstep > 12: + grib_set(gid, 'step', max(0, step-2*int(c.dtime))) + else: + grib_set(gid, 'step', 0) + grib_set(gid, 'time', fdate.hour*100) + grib_set(gid, 'date', fdate.year*10000 + + fdate.month*100+fdate.day) + grib_write(gid, f) + + if c.basetime is not None: + elimit = datetime.strptime(c.end_date + + c.basetime, '%Y%m%d%H') + else: + elimit = sdate + timedelta(2*int(c.dtime)) + + # squeeze out information of last two steps contained + # in svdp + # if step+int(c.dtime) == c.maxstep and c.maxstep>12 + # or sdates+timedelta(hours = int(c.dtime)) + # >= elimit: + # Note that svdp[0] has not been popped in this case + + if step == c.maxstep and c.maxstep > 12 or \ + sdates == elimit: + + values = svdp[3] + grib_set_values(gid, values) + grib_set(gid, 'step', 0) + truedatetime = fdate + timedelta(hours= + 2*int(c.dtime)) + grib_set(gid, 'time', truedatetime.hour * 100) + grib_set(gid, 'date', truedatetime.year * 10000 + + truedatetime.month * 100 + + truedatetime.day) + grib_write(gid, h) + + #values = (svdp[1]+svdp[2])/2. + if cparamId == '142' or cparamId == '143': + values = disaggregation.darain(list(reversed(svdp))) + else: + values = disaggregation.dapoly(list(reversed(svdp))) + + grib_set(gid, 'step', 0) + truedatetime = fdate + timedelta(hours=int(c.dtime)) + grib_set(gid, 'time', truedatetime.hour * 100) + grib_set(gid, 'date', truedatetime.year * 10000 + + truedatetime.month * 100 + + truedatetime.day) + grib_set_values(gid, values) + grib_write(gid, g) + + grib_release(gid) + + gid = grib_new_from_index(iid) + + f.close() + g.close() + h.close() + + grib_index_release(iid) + + return diff --git a/python/pythontest/TestInstallTar/test_untar/python/GribTools.py b/python/pythontest/TestInstallTar/test_untar/python/GribTools.py new file mode 100644 index 0000000000000000000000000000000000000000..a68d1a5485f1b7e3039081ec7fb176d90466ab06 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/GribTools.py @@ -0,0 +1,318 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: July 2014 +# +# @Change History: +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - changed some naming +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# The GRIB API provides all necessary tools to work directly with the +# grib files. 
Nevertheless, the GRIB API tools are very basic and are in +# direct connection with the grib files. This class provides some higher +# functions which apply a set of GRIB API tools together in the respective +# context. So, the class initially contains a list of grib files (their +# names) and the using program then applies the methods directly on the +# class objects without having to think about how the actual GRIB API +# tools have to be arranged. +# +# @Class Content: +# - __init__ +# - get_keys +# - set_keys +# - copy +# - index +# +# @Class Attributes: +# - filenames +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +from gribapi import grib_new_from_file, grib_is_defined, grib_get, \ + grib_release, grib_set, grib_write, grib_index_read, \ + grib_index_new_from_file, grib_index_add_file, \ + grib_index_write + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class GribTools(object): + ''' + Class for GRIB utilities (new methods) based on GRIB API + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, filenames): + ''' + @Description: + Initialise an object of GribTools and assign a list + of filenames. + + @Input: + filenames: list of strings + A list of filenames. + + @Return: + <nothing> + ''' + + self.filenames = filenames + + return + + + def get_keys(self, keynames, wherekeynames=[], wherekeyvalues=[]): + ''' + @Description: + get keyvalues for a given list of keynames + a where statement can be given (list of key and list of values) + + @Input: + keynames: list of strings + List of keynames. + + wherekeynames: list of strings, optional + Default value is an empty list. + + wherekeyvalues: list of strings, optional + Default value is an empty list. + + @Return: + return_list: list of strings + List of keyvalues for given keynames. + ''' + + fileid = open(self.filenames, 'r') + + return_list = [] + + while 1: + gid_in = grib_new_from_file(fileid) + + if gid_in is None: + break + + if len(wherekeynames) != len(wherekeyvalues): + raise Exception("Number of key values and key names must be \ + the same. Give a value for each keyname!") + + select = True + i = 0 + for wherekey in wherekeynames: + if not grib_is_defined(gid_in, wherekey): + raise Exception("where key was not defined") + + select = (select and (str(wherekeyvalues[i]) == + str(grib_get(gid_in, wherekey)))) + i += 1 + + if select: + llist = [] + for key in keynames: + llist.extend([str(grib_get(gid_in, key))]) + return_list.append(llist) + + grib_release(gid_in) + + fileid.close() + + return return_list + + + def set_keys(self, fromfile, keynames, keyvalues, wherekeynames=[], + wherekeyvalues=[], strict=False, filemode='w'): + ''' + @Description: + Opens the file to read the grib messages and then write + them to a new output file. By default all messages are + written out. Also, the keyvalues of the passed list of + keynames are set or only those meeting the where statement. + (list of key and list of values). + + @Input: + fromfile: string + Filename of the input file to read the grib messages from. 
+ + keynames: list of strings + List of keynames. Default is an empty list. + + keyvalues: list of strings + List of keynames. Default is an empty list. + + wherekeynames: list of strings, optional + Default value is an empty list. + + wherekeyvalues: list of strings, optional + Default value is an empty list. + + strict: boolean, optional + Decides if everything from keynames and keyvalues + is written out the grib file (False) or only those + meeting the where statement (True). Default is False. + + filemode: string, optional + Sets the mode for the output file. Default is "w". + + @Return: + <nothing> + + ''' + fout = open(self.filenames, filemode) + fin = open(fromfile) + + while 1: + gid_in = grib_new_from_file(fin) + + if gid_in is None: + break + + if len(wherekeynames) != len(wherekeyvalues): + raise Exception("Give a value for each keyname!") + + select = True + i = 0 + for wherekey in wherekeynames: + if not grib_is_defined(gid_in, wherekey): + raise Exception("where Key was not defined") + + select = (select and (str(wherekeyvalues[i]) == + str(grib_get(gid_in, wherekey)))) + i += 1 + + if select: + i = 0 + for key in keynames: + grib_set(gid_in, key, keyvalues[i]) + i += 1 + + grib_write(gid_in, fout) + + grib_release(gid_in) + + fin.close() + fout.close() + + return + + def copy(self, filename_in, selectWhere=True, + keynames=[], keyvalues=[], filemode='w'): + ''' + Add the content of another input grib file to the objects file but + only messages corresponding to keys/values passed to the function. + The selectWhere switch decides if to copy the keys equal to (True) or + different to (False) the keynames/keyvalues list passed to the function. + + @Input: + filename_in: string + Filename of the input file to read the grib messages from. + + selectWhere: boolean, optional + Decides if to copy the keynames and values equal to (True) or + different to (False) the keynames/keyvalues list passed to the + function. Default is True. + + keynames: list of strings, optional + List of keynames. Default is an empty list. + + keyvalues: list of strings, optional + List of keynames. Default is an empty list. + + filemode: string, optional + Sets the mode for the output file. Default is "w". + + @Return: + <nothing> + ''' + + fin = open(filename_in) + fout = open(self.filenames, filemode) + + while 1: + gid_in = grib_new_from_file(fin) + + if gid_in is None: + break + + if len(keynames) != len(keyvalues): + raise Exception("Give a value for each keyname!") + + select = True + i = 0 + for key in keynames: + if not grib_is_defined(gid_in, key): + raise Exception("Key was not defined") + + if selectWhere: + select = (select and (str(keyvalues[i]) == + str(grib_get(gid_in, key)))) + else: + select = (select and (str(keyvalues[i]) != + str(grib_get(gid_in, key)))) + i += 1 + + if select: + grib_write(gid_in, fout) + + grib_release(gid_in) + + fin.close() + fout.close() + + return + + def index(self, index_keys=["mars"], index_file="my.idx"): + ''' + @Description: + Create index file from a list of files if it does not exist or + read an index file. + + @Input: + index_keys: list of strings, optional + Contains the list of key parameter names from + which the index is to be created. + Default is a list with a single entry string "mars". + + index_file: string, optional + Filename where the indices are stored. + Default is "my.idx". + + @Return: + iid: integer + Grib index id. + ''' + print "... 
index will be done" + iid = None + + if os.path.exists(index_file): + iid = grib_index_read(index_file) + print "Use existing index file: %s " % (index_file) + else: + for filename in self.filenames: + print "Inputfile: %s " % (filename) + if iid is None: + iid = grib_index_new_from_file(filename, index_keys) + else: + grib_index_add_file(iid, filename) + + if iid is not None: + grib_index_write(iid, index_file) + + print '... index done' + + return iid diff --git a/python/pythontest/TestInstallTar/test_untar/python/MarsRetrieval.py b/python/pythontest/TestInstallTar/test_untar/python/MarsRetrieval.py new file mode 100644 index 0000000000000000000000000000000000000000..fa4012c2e88578d0d2635dba2a6a7530ca67498c --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/MarsRetrieval.py @@ -0,0 +1,419 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - optimized display_info +# - optimized data_retrieve and seperate between python and shell +# script call +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - applied some minor modifications in programming style/structure +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Description: +# A MARS revtrieval has a specific syntax with a selection of keywords and +# their corresponding values. This class provides the necessary functions +# by displaying the selected parameters and their values and the actual +# retrievement of the data through a mars request or a Python web api +# interface. The initialization already expects all the keyword values. +# +# @Class Content: +# - __init__ +# - display_info +# - data_retrieve +# +# @Class Attributes: +# - server +# - marsclass +# - dtype +# - levtype +# - levelist +# - repres +# - date +# - resol +# - stream +# - area +# - time +# - step +# - expver +# - number +# - accuracy +# - grid +# - gaussian +# - target +# - param +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import subprocess +import os + +import _config +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ +class MarsRetrieval(object): + ''' + Class for submitting MARS retrievals. + + A description of MARS keywords/arguments and examples of their + values can be found here: + https://software.ecmwf.int/wiki/display/UDOC/\ + Identification+keywords#Identificationkeywords-class + + ''' + + def __init__(self, server, marsclass="ei", type="", levtype="", + levelist="", repres="", date="", resol="", stream="", + area="", time="", step="", expver="1", number="", + accuracy="", grid="", gaussian="", target="", + param=""): + ''' + @Description: + Initialises the instance of the MarsRetrieval class and + defines and assigns a set of the necessary retrieval parameters + for the FLEXPART input data. 
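+
+            Example (illustrative keyword values only; the target path is
+            a placeholder):
+
+                MarsRetrieval(server, marsclass='ei', stream='oper',
+                              type='an', levtype='ml', levelist='1/to/137',
+                              resol='63', grid='1.0/1.0',
+                              area='40.0/-5.0/30.0/5.0', date='20160809',
+                              time='00/06/12/18', expver='1',
+                              target='workdir/example.grb',
+                              param='130/131/132')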
+
+            A description of MARS keywords/arguments, their dependencies
+            on each other and examples of their values can be found here:
+
+            https://software.ecmwf.int/wiki/display/UDOC/MARS+keywords
+
+        @Input:
+            self: instance of MarsRetrieval
+                For description see class documentation.
+
+            server: instance of ECMWFService (from ECMWF Web-API)
+                This is the connection to the ECMWF data servers.
+                It is needed for the pythonic access of ECMWF data.
+
+            marsclass: string, optional
+                Characterisation of dataset. E.g. EI (ERA-Interim),
+                E4 (ERA40), OD (Operational archive), ea (ERA5).
+                Default is the ERA-Interim dataset "ei".
+
+            type: string, optional
+                Determines the type of fields to be retrieved.
+                Selects between observations, images or fields.
+                Examples for fields: Analysis (an), Forecast (fc),
+                Perturbed Forecast (pf), Control Forecast (cf) and so on.
+                Default is an empty string.
+
+            levtype: string, optional
+                Denotes type of level. Has a direct implication on valid
+                levelist values!
+                E.g. model level (ml), pressure level (pl), surface (sfc),
+                potential vorticity (pv), potential temperature (pt)
+                and depth (dp).
+                Default is an empty string.
+
+            levelist: string, optional
+                Specifies the required levels. It has to have a valid
+                correspondence to the selected levtype.
+                Examples: model level: 1/to/137, pressure levels: 500/to/1000
+                Default is an empty string.
+
+            repres: string, optional
+                Selects the representation of the archived data.
+                E.g. sh - spherical harmonics, gg - Gaussian grid,
+                ll - latitude/longitude, ...
+                Default is an empty string.
+
+            date: string, optional
+                Specifies the Analysis date, the Forecast base date or
+                Observations date. Valid formats are:
+                Absolute as YYYY-MM-DD or YYYYMMDD.
+                Default is an empty string.
+
+            resol: string, optional
+                Specifies the desired triangular truncation of retrieved data,
+                before carrying out any other selected post-processing.
+                The default is automatic truncation (auto), by which the lowest
+                resolution compatible with the value specified in grid is
+                automatically selected for the retrieval.
+                Users wanting to perform post-processing from full spectral
+                resolution should specify Archived Value (av).
+                The following are examples of existing resolutions found in
+                the archive: 63, 106, 159, 213, 255, 319, 399, 511, 799 or 1279.
+                This keyword has no meaning/effect if the archived data is
+                not in spherical harmonics representation.
+                The best selection can be found here:
+                https://software.ecmwf.int/wiki/display/UDOC/\
+                Retrieve#Retrieve-Truncationbeforeinterpolation
+                Default is an empty string.
+
+            stream: string, optional
+                Identifies the forecasting system used to generate the data.
+                E.g. oper (Atmospheric model), enfo (Ensemble forecasts), ...
+                Default is an empty string.
+
+            area: string, optional
+                Specifies the desired sub-area of data to be extracted.
+                Areas can be defined to wrap around the globe.
+
+                Latitude values must be given as signed numbers, with:
+                    north latitudes (i.e. north of the equator)
+                        being positive (e.g: 40.5)
+                    south latitudes (i.e. south of the equator)
+                        being negative (e.g: -50.5)
+                Longitude values must be given as signed numbers, with:
+                    east longitudes (i.e. east of the 0 degree meridian)
+                        being positive (e.g: 35.0)
+                    west longitudes (i.e. west of the 0 degree meridian)
+                        being negative (e.g: -20.5)
+
+                E.g.: North/West/South/East
+                Default is an empty string.
+
+            time: string, optional
+                Specifies the time of the data in hours and minutes.
+ Valid values depend on the type of data: Analysis time, + Forecast base time or First guess verification time + (all usually at synoptic hours: 00, 06, 12 and 18 ). + Observation time (any combination in hours and minutes is valid, + subject to data availability in the archive). + The syntax is HHMM or HH:MM. If MM is omitted it defaults to 00. + Default is an empty string. + + step: string, optional + Specifies the forecast time step from forecast base time. + Valid values are hours (HH) from forecast base time. It also + specifies the length of the forecast which verifies at + First Guess time. + E.g. 1/3/6-hourly + Default is an empty string. + + expver: string, optional + The version of the dataset. Each experiment is assigned a + unique code (version). Production data is assigned 1 or 2, + and experimental data in Operations 11, 12 ,... + Research or Member State's experiments have a four letter + experiment identifier. + Default is "1". + + number: string, optional + Selects the member in ensemble forecast run. (Only then it + is necessary.) It has a different meaning depending on + the type of data. + E.g. Perturbed Forecasts: specifies the Ensemble forecast member + Default is an empty string. + + accuracy: string, optional + Specifies the number of bits per value to be used in the + generated GRIB coded fields. + A positive integer may be given to specify the preferred number + of bits per packed value. This must not be greater than the + number of bits normally used for a Fortran integer on the + processor handling the request (typically 32 or 64 bit). + Within a compute request the accuracy of the original fields + can be passed to the result field by specifying accuracy=av. + Default is an empty string. + + grid: string, optional + Specifies the output grid which can be either a Gaussian grid + or a Latitude/Longitude grid. MARS requests specifying + grid=av will return the archived model grid. + + Lat/Lon grid: The grid spacing needs to be an integer + fraction of 90 degrees e.g. grid = 0.5/0.5 + + Gaussian grid: specified by a letter denoting the type of + Gaussian grid followed by an integer (the grid number) + representing the number of lines between the Pole and Equator, + e.g. + grid = F160 - full (or regular) Gaussian grid with + 160 latitude lines between the pole and equator + grid = N320 - ECMWF original reduced Gaussian grid with + 320 latitude lines between the pole and equator, + see Reduced Gaussian Grids for grid numbers used at ECMWF + grid = O640 - ECMWF octahedral (reduced) Gaussian grid with + 640 latitude lines between the pole and equator + Default is an empty string. + + gaussian: string, optional + This parameter is deprecated and should no longer be used. + Specifies the desired type of Gaussian grid for the output. + Valid Gaussian grids are quasi-regular (reduced) or regular. + Keyword gaussian can only be specified together with + keyword grid. Gaussian without grid has no effect. + Default is an empty string. + + target: string, optional + Specifies a file into which data is to be written after + retrieval or manipulation. Path names should always be + enclosed in double quotes. The MARS client supports automatic + generation of multiple target files using MARS keywords + enclosed in square brackets [ ]. If the environment variable + MARS_MULTITARGET_STRICT_FORMAT is set to 1 before calling mars, + the keyword values will be used in the filename as shown by + the ecCodes GRIB tool grib_ls -m, e.g. 
with + MARS_MULTITARGET_STRICT_FORMAT set to 1 the keywords time, + expver and param will be formatted as 0600, 0001 and 129.128 + rather than 600, 1 and 129. + Default is an empty string. + + param: string, optional + Specifies the meteorological parameter. + The list of meteorological parameters in MARS is extensive. + Their availability is directly related to their meteorological + meaning and, therefore, the rest of directives specified + in the MARS request. + Meteorological parameters can be specified by their + GRIB code (param=130), their mnemonic (param=t) or + full name (param=temperature). + The list of parameter should be seperated by a "/"-sign. + E.g. 130/131/133 + Default is an empty string. + + @Return: + <nothing> + ''' + + self.server = server + self.marsclass = marsclass + self.type = type + self.levtype = levtype + self.levelist = levelist + self.repres = repres + self.date = date + self.resol = resol + self.stream = stream + self.area = area + self.time = time + self.step = step + self.expver = expver + self.number = number + self.accuracy = accuracy + self.grid = grid + self.gaussian = gaussian + self.target = target + self.param = param + + return + + + def display_info(self): + ''' + @Description: + Prints all class attributes and their values. + + @Input: + self: instance of MarsRetrieval + For description see class documentation. + + @Return: + <nothing> + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # iterate through all attributes and print them + # with their corresponding values + for item in attrs.items(): + if item[0] in 'server': + pass + else: + print item[0] + ': ' + str(item[1]) + + return + + + def print_info(self): + ''' + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # open a file to store all requests to + with open(os.path.join(_config.PATH_RUN_DIR + os.path.sep + + _config.FILE_MARS_REQUESTS), 'a') as f: + f.write('mars\n') + # iterate through all attributes and print them + # with their corresponding values + for item in attrs.items(): + if item[0] in 'server': + pass + else: + f.write(item[0] + ': ' + str(item[1]) + '\n') + f.write('\n\n') + + return + + def data_retrieve(self): + ''' + @Description: + Submits a MARS retrieval. Depending on the existence of + ECMWF Web-API it is submitted via Python or a + subprocess in the Shell. The parameter for the mars retrieval + are taken from the defined class attributes. + + @Input: + self: instance of MarsRetrieval + For description see class documentation. 
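+
+            Note (added): the request is assembled internally as a comma
+            separated string roughly of the form
+                'ret,class=ei,stream=oper,type=an,levtype=ml,date=20160809,...'
+            with "marsclass" renamed to "class"; in the shell based branch
+            the target file is appended to this string before it is piped
+            into the mars command.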
+ + @Return: + <nothing> + ''' + # Get all class attributes and their values as a dictionary + attrs = vars(self) + + # convert the dictionary of attributes into a comma + # seperated list of attributes with their values + # needed for the retrieval call + s = 'ret' + for k, v in attrs.iteritems(): + if k in 'server': + continue + if k == 'marsclass': + k = 'class' + if v == '': + continue + if k.lower() == 'target': + target = v + else: + s = s + ',' + k + '=' + str(v) + + # MARS request via Python script + if self.server is not False: + try: + self.server.execute(s, target) + except: + print('MARS Request failed, ' + 'have you already registered at apps.ecmwf.int?') + raise IOError + if os.stat(target).st_size == 0: + print('MARS Request returned no data - please check request') + raise IOError + # MARS request via extra process in shell + else: + s += ',target = "' + target + '"' + p = subprocess.Popen(['mars'], stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, bufsize=1) + pout = p.communicate(input=s)[0] + print pout.decode() + + if 'Some errors reported' in pout.decode(): + print('MARS Request failed - please check request') + raise IOError + + if os.stat(target).st_size == 0: + print('MARS Request returned no data - please check request') + raise IOError + + return diff --git a/python/pythontest/TestInstallTar/test_untar/python/UioFiles.py b/python/pythontest/TestInstallTar/test_untar/python/UioFiles.py new file mode 100644 index 0000000000000000000000000000000000000000..fe6995320308bfb88805745ac5753ffbdc9dd799 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/UioFiles.py @@ -0,0 +1,178 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - modified method list_files to work with glob instead of listdir +# - added pattern search in method list_files +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - optimisation of method list_files since it didn't work correctly +# for sub directories +# - additional speed up of method list_files +# - modified the class so that it is initiated with a pattern instead +# of suffixes. Gives more precision in selection of files. +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Class Decription: +# The class is for file manipulation. It is initiated with a regular +# expression pattern for this instance and can produce a list of Files +# from the given file pattern. These files can be deleted. 
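To make the request assembly in data_retrieve above easier to follow, here is a minimal, self-contained sketch of the same idea; every keyword value below is an illustrative example, not a default of the MarsRetrieval class.

# Minimal sketch of how data_retrieve joins the MARS keywords into a single
# request string; all values are made-up examples.
example_attrs = {'marsclass': 'EI', 'type': 'AN', 'levtype': 'ML',
                 'levelist': '1/to/60', 'date': '20160809',
                 'time': '00/06/12/18', 'step': '00', 'param': '130/131/132',
                 'grid': '1.0/1.0', 'target': 'EN16080900', 'gaussian': ''}

request = 'ret'
target = ''
for key, value in example_attrs.items():
    if value == '':            # empty keywords are skipped
        continue
    if key == 'marsclass':     # MARS expects the keyword "class"
        key = 'class'
    if key.lower() == 'target':
        target = value         # target is appended separately
    else:
        request += ',' + key + '=' + str(value)

request += ',target = "' + target + '"'
print(request)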
+# +# @Class Content: +# - __init__ +# - __str__ +# - __list_files__ +# - delete_files +# +# @Class Attributes: +# - pattern +# - files +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import fnmatch + +# software specific module from flex_extract +#import profiling +from tools import silent_remove, get_list_as_string + +# ------------------------------------------------------------------------------ +# CLASS +# ------------------------------------------------------------------------------ + +class UioFiles(object): + ''' + Class to manipulate files. At initialisation it has the attribute + pattern which stores a regular expression pattern for the files associated + with the instance of the class. + ''' + # -------------------------------------------------------------------------- + # CLASS FUNCTIONS + # -------------------------------------------------------------------------- + def __init__(self, path, pattern): + ''' + @Description: + Assignes a specific pattern for these files. + + @Input: + self: instance of UioFiles + Description see class documentation. + + path: string + Directory where to list the files. + + pattern: string + Regular expression pattern. For example: '*.grb' + + @Return: + <nothing> + ''' + + self.path = path + self.pattern = pattern + self.files = None + + self.__list_files__(self.path) + + return + + #@profiling.timefn + def __list_files__(self, path, callid=0): + ''' + @Description: + Lists all files in the directory with the matching + regular expression pattern. + + @Input: + self: instance of UioFiles + Description see class documentation. + + path: string + Path to the files. + + callid: integer + Id which tells the function if its the first call + or a recursive call. Default and first call is 0. + Everything different from 0 is ment to be a recursive case. + + @Return: + <nothing> + ''' + + # initialize variable in first function call + if callid == 0: + self.files = [] + + # Get the absolute path + path = os.path.abspath(path) + + # get the file list of the path if its not a directory and + # if it contains the pattern + self.files.extend([os.path.join(path, k) for k in os.listdir(path) + if fnmatch.fnmatch(k, self.pattern)]) + + # find possible sub-directories in the path + subdirs = [s for s in os.listdir(path) + if os.path.isdir(os.path.join(path, s))] + + # do recursive calls for sub-direcorties + if subdirs: + for subdir in subdirs: + self.__list_files__(os.path.join(path, subdir), callid=1) + + return + + def __str__(self): + ''' + @Description: + Converts the list of files into a single string. + The entries are sepereated by "," sign. + + @Input: + self: instance of UioFiles + Description see class documentation. + + @Return: + files_string: string + The content of the list as a single string. + ''' + + filenames = [os.path.basename(f) for f in self.files] + files_string = get_list_as_string(filenames, concatenate_sign=', ') + + return files_string + + def delete_files(self): + ''' + @Description: + Deletes the files. + + @Input: + self: instance of UioFiles + Description see class documentation. 
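A short usage sketch for this class, with a made-up directory and pattern: initialise with a path and a shell-style pattern, inspect the matches, then remove them. This mirrors how get_mars_data cleans up old GRIB files further below.

# Hypothetical usage of UioFiles; directory and pattern are examples only.
from UioFiles import UioFiles

old_grib_files = UioFiles('../work', '*_acc_*.grb')  # recursive search below ../work
print(str(old_grib_files))                           # "file1.grb, file2.grb, ..."
old_grib_files.delete_files()                        # silently removes each match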
+ + @Return: + <nothing> + ''' + + for old_file in self.files: + silent_remove(old_file) + + return diff --git a/python/pythontest/TestInstallTar/test_untar/python/_config.py b/python/pythontest/TestInstallTar/test_untar/python/_config.py new file mode 100644 index 0000000000000000000000000000000000000000..4c5751cc1b98855fb1fa9e7b76c7d4b82c2b6983 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/_config.py @@ -0,0 +1,85 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: August 2018 +# +# @Change History: +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Description: +# Contains constant value parameter for flex_extract. +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import inspect + +_VERSION_STR = '7.1' + +# ------------------------------------------------------------------------------ +# EXPLICIT FILENAMES +# ------------------------------------------------------------------------------ + +FLEXEXTRACT_DIRNAME = 'flex_extract_v' + _VERSION_STR +FILE_MARS_REQUESTS = 'mars_requests.dat' +FORTRAN_EXECUTABLE = 'CONVERT2' +FILE_USER_ENVVARS = 'ECMWF_ENV' +TEMPFILE_INSTALL_COMPILEJOB = 'compilejob.temp' +FILE_INSTALL_COMPILEJOB = 'compilejob.ksh' +TEMPFILE_INSTALL_JOB = 'job.temp.o' +TEMPFILE_JOB = 'job.temp' +FILE_JOB_OD = 'job.ksh' +FILE_JOB_OP = 'jopoper.ksh' + +# ------------------------------------------------------------------------------ +# EXPLICIT PATHES +# ------------------------------------------------------------------------------ + +# add path to pythonpath +PATH_LOCAL_PYTHON = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if PATH_LOCAL_PYTHON not in sys.path: + sys.path.append(PATH_LOCAL_PYTHON) + +PATH_FLEXEXTRACT_DIR = os.path.normpath(os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) + '/../') + +PATH_RELATIVE_PYTHON = os.path.relpath(PATH_LOCAL_PYTHON, PATH_FLEXEXTRACT_DIR) + +PATH_TEMPLATES = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + '_templates') + +PATH_RELATIVE_TEMPLATES = os.path.relpath(PATH_TEMPLATES, PATH_FLEXEXTRACT_DIR) + +# path to gribtable +PATH_GRIBTABLE = os.path.join(PATH_TEMPLATES + os.path.sep + + 'ecmwf_grib1_table_128') + +# path to run directory +PATH_RUN_DIR = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + 'run') + +# path to directory where all control files are stored +PATH_CONTROLFILES = os.path.join(PATH_RUN_DIR + os.path.sep + + 'control') + +# path to directory where all control files are stored +PATH_JOBSCRIPTS = os.path.join(PATH_RUN_DIR + os.path.sep + + 'jobscripts') + +PATH_FORTRAN_SRC = os.path.join(PATH_FLEXEXTRACT_DIR + os.path.sep + + 'src') + +PATH_RELATIVE_FORTRAN_SRC = os.path.relpath(PATH_FORTRAN_SRC, PATH_FLEXEXTRACT_DIR) + diff --git a/python/pythontest/TestInstallTar/test_untar/python/disaggregation.py b/python/pythontest/TestInstallTar/test_untar/python/disaggregation.py new file mode 100644 index 0000000000000000000000000000000000000000..aa84eaf0c28110f6772ff3e7a48fd411082d52a4 --- /dev/null +++ 
b/python/pythontest/TestInstallTar/test_untar/python/disaggregation.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: March 2018 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - migration of the methods dapoly and darain from Fortran +# (flex_extract_v6 and earlier) to Python +# +# April 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added structured documentation +# - outsourced the disaggregation functions dapoly and darain +# to a new module named disaggregation +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Module Description: +# disaggregation of deaccumulated flux data from an ECMWF model FG field. +# Initially the flux data to be concerned are: +# - large-scale precipitation +# - convective precipitation +# - surface sensible heat flux +# - surface solar radiation +# - u stress +# - v stress +# Different versions of disaggregation is provided for rainfall +# data (darain, modified linear) and the surface fluxes and +# stress data (dapoly, cubic polynomial). +# +# @Module Content: +# - dapoly +# - darain +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ +def dapoly(alist): + ''' + @Author: P. JAMES + + @Date: 2000-03-29 + + @ChangeHistory: + June 2003 - A. BECK (2003-06-01) + adaptaions + November 2015 - Leopold Haimberger (University of Vienna) + migration from Fortran to Python + + @Description: + Interpolation of deaccumulated fluxes of an ECMWF model FG field + using a cubic polynomial solution which conserves the integrals + of the fluxes within each timespan. + disaggregationregation is done for 4 accumluated timespans which generates + a new, disaggregated value which is output at the central point + of the 4 accumulation timespans. This new point is used for linear + interpolation of the complete timeseries afterwards. + + @Input: + alist: list of size 4, array(2D), type=float + List of 4 timespans as 2-dimensional, horizontal fields. + E.g. [[array_t1], [array_t2], [array_t3], [array_t4]] + + @Return: + nfield: array(2D), type=float + New field which replaces the field at the second position + of the accumulation timespans. + + ''' + pya = (alist[3] - alist[0] + 3. * (alist[1] - alist[2])) / 6. + pyb = (alist[2] + alist[0]) / 2. - alist[1] - 9. * pya / 2. + pyc = alist[1] - alist[0] - 7. * pya / 2. - 2. * pyb + pyd = alist[0] - pya / 4. - pyb / 3. - pyc / 2. + nfield = 8. * pya + 4. * pyb + 2. * pyc + pyd + + return nfield + + +def darain(alist): + ''' + @Author: P. JAMES + + @Date: 2000-03-29 + + @ChangeHistory: + June 2003 - A. 
BECK (2003-06-01) + adaptaions + November 2015 - Leopold Haimberger (University of Vienna) + migration from Fortran to Python + + @Description: + Interpolation of deaccumulated fluxes of an ECMWF model FG rainfall + field using a modified linear solution which conserves the integrals + of the fluxes within each timespan. + disaggregationregation is done for 4 accumluated timespans which generates + a new, disaggregated value which is output at the central point + of the 4 accumulation timespans. This new point is used for linear + interpolation of the complete timeseries afterwards. + + @Input: + alist: list of size 4, array(2D), type=float + List of 4 timespans as 2-dimensional, horizontal fields. + E.g. [[array_t1], [array_t2], [array_t3], [array_t4]] + + @Return: + nfield: array(2D), type=float + New field which replaces the field at the second position + of the accumulation timespans. + ''' + xa = alist[0] + xb = alist[1] + xc = alist[2] + xd = alist[3] + xa[xa < 0.] = 0. + xb[xb < 0.] = 0. + xc[xc < 0.] = 0. + xd[xd < 0.] = 0. + + xac = 0.5 * xb + mask = xa + xc > 0. + xac[mask] = xb[mask] * xc[mask] / (xa[mask] + xc[mask]) + xbd = 0.5 * xc + mask = xb + xd > 0. + xbd[mask] = xb[mask] * xc[mask] / (xb[mask] + xd[mask]) + nfield = xac + xbd + + return nfield diff --git a/python/pythontest/TestInstallTar/test_untar/python/get_mars_data.py b/python/pythontest/TestInstallTar/test_untar/python/get_mars_data.py new file mode 100755 index 0000000000000000000000000000000000000000..bf8e02f37a06234c23fe4ea81ff88738337a9e43 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/get_mars_data.py @@ -0,0 +1,301 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - moved the getEIdata program into a function "get_mars_data" +# - moved the AgurmentParser into a seperate function +# - adatpted the function for the use in flex_extract +# - renamed file to get_mars_data +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added structured documentation +# - minor changes in programming style for consistence +# - added function main and moved function calls vom __main__ there +# (necessary for better documentation with docstrings for later +# online documentation) +# - use of UIFiles class for file selection and deletion +# +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program can be used as a module in the whole flex_extract process +# or can be run by itself to just extract MARS data from ECMWF. To do so, +# a couple of necessary parameters has to be passed with the program call. +# See documentation for more details. 
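As a quick sanity check of the two disaggregation schemes defined in disaggregation.py above (assuming the module is importable under that name), a constant flux field should come back unchanged from both dapoly and darain:

# Constant accumulation fields are reproduced unchanged by both schemes;
# the 2x2 arrays are only for illustration.
import numpy as np
from disaggregation import dapoly, darain

fields = [np.ones((2, 2)) for _ in range(4)]     # four accumulation timespans
print(dapoly(fields))                            # cubic polynomial -> all 1.0
print(darain([f.copy() for f in fields]))        # modified linear -> all 1.0
                                                 # (copies, since darain clips its inputs in place)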
+#
+# @Program Content:
+#    - main
+#    - get_mars_data
+#    - do_retrievement
+#
+#*******************************************************************************
+
+# ------------------------------------------------------------------------------
+# MODULES
+# ------------------------------------------------------------------------------
+import os
+import sys
+import datetime
+import inspect
+try:
+    ecapi = True
+    import ecmwfapi
+except ImportError:
+    ecapi = False
+
+# software specific classes and modules from flex_extract
+from tools import my_error, normal_exit, get_cmdline_arguments, read_ecenv
+from ControlFile import ControlFile
+from EcFlexpart import EcFlexpart
+from UioFiles import UioFiles
+
+# add path to pythonpath so that python finds its buddies
+LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath(
+    inspect.getfile(inspect.currentframe())))
+if LOCAL_PYTHON_PATH not in sys.path:
+    sys.path.append(LOCAL_PYTHON_PATH)
+
+# ------------------------------------------------------------------------------
+# FUNCTION
+# ------------------------------------------------------------------------------
+def main():
+    '''
+    @Description:
+        If get_mars_data is called from the command line, this function
+        controls the program flow, calls the argument parser function and
+        the get_mars_data function for retrieving EC data.
+
+    @Input:
+        <nothing>
+
+    @Return:
+        <nothing>
+    '''
+
+    args = get_cmdline_arguments()
+
+    try:
+        c = ControlFile(args.controlfile)
+    except IOError:
+        try:
+            c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile)
+        except IOError:
+            print 'Could not read CONTROL file "' + args.controlfile + '"'
+            print 'Either it does not exist or its syntax is wrong.'
+            print 'Try "' + sys.argv[0].split('/')[-1] + \
+                  ' -h" to print usage information'
+            sys.exit(1)
+
+    env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV')
+    c.assign_args_to_control(args, env_parameter)
+    c.assign_envs_to_control(env_parameter)
+    c.check_conditions()
+
+    get_mars_data(c)
+    normal_exit(c.mailfail, 'Done!')
+
+    return
+
+def get_mars_data(c):
+    '''
+    @Description:
+        Retrieves the EC data needed for a FLEXPART simulation.
+        Start and end dates of the retrieval period are set. Retrievals
+        are divided into smaller periods if necessary and the datechunk
+        parameter is set.
+
+    @Input:
+        c: instance of class ControlFile
+            Contains all the parameters of the CONTROL file, which are e.g.:
+            DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME,
+            STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT,
+            LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY,
+            OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT,
+            ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR,
+            MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME,
+            DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS
+
+            For more information about format and content of the parameters
+            see documentation.
+
+    @Return:
+        <nothing>
+    '''
+
+    if not os.path.exists(c.inputdir):
+        os.makedirs(c.inputdir)
+
+    if c.request == 0 or c.request == 2:
+        print("Retrieving EC data!")
+    elif c.request == 1:
+        print("Printing mars requests!")
+
+    print("start date %s " % (c.start_date))
+    print("end date %s " % (c.end_date))
+
+    if ecapi:
+        server = ecmwfapi.ECMWFService("mars")
+    else:
+        server = False
+
+    c.ecapi = ecapi
+    print('Using ECMWF WebAPI: ' + str(c.ecapi))
+
+    # basetime is counted backwards in time
+
+    # if basetime is 00
+    # then data from 12 UTC of the previous day up to 00 UTC of the
+    # start day is retrieved, but without 12 itself, only 12 + step
+
+    # if basetime is 12
+    # then data from 00 + step up to 12 UTC of the start day is retrieved
+
+    # a pure forecast is counted forward in time.
+    # pure forecast mode applies if the forecast is longer than 24 hours
+    # how else can this case be detected ????
+    # only type FC and steps of more than 24 hours?
+    # the only problem with a pure forecast is the naming of the files!
+    # i.e. as soon as the period crosses day boundaries
+    # however, this is relevant and apparent for the NON-FLUX data
+
+
+    # set start date of retrieval period
+    start = datetime.date(year=int(c.start_date[:4]),
+                          month=int(c.start_date[4:6]),
+                          day=int(c.start_date[6:]))
+    startm1 = start - datetime.timedelta(days=1)
+
+    # set end date of retrieval period
+    end = datetime.date(year=int(c.end_date[:4]),
+                        month=int(c.end_date[4:6]),
+                        day=int(c.end_date[6:]))
+
+    # set time period for one single retrieval
+    datechunk = datetime.timedelta(days=int(c.date_chunk))
+
+    if c.basetime == '00':
+        start = startm1
+
+    if c.basetime == '00' or c.basetime == '12':
+        # endp1 = end + datetime.timedelta(days=1)
+        endp1 = end
+    else:
+        # endp1 = end + datetime.timedelta(days=2)
+        endp1 = end + datetime.timedelta(days=1)
+
+    # -------------- flux data ------------------------------------------------
+    if c.request == 0 or c.request == 2:
+        print('... removing old flux content of ' + c.inputdir)
+        tobecleaned = UioFiles(c.inputdir,
+                               '*_acc_*.' + str(os.getppid()) + '.*.grb')
+        tobecleaned.delete_files()
+
+    # if forecasts of no more than one day (up to 24 h) are to be retrieved,
+    # collect accumulation data (flux data)
+    # with additional days at the beginning and at the end
+    # (used for complete disaggregation of the original period)
+    if c.maxstep <= 24:
+        do_retrievement(c, server, startm1, endp1, datechunk, fluxes=True)
+
+    # if forecast data longer than 24 h are to be retrieved,
+    # collect accumulation data (flux data)
+    # with the exact start and end date
+    # (disaggregation will be done for the
+    # exact time period with boundary conditions)
+    else:
+        do_retrievement(c, server, start, end, datechunk, fluxes=True)
+
+    # -------------- non flux data --------------------------------------------
+    if c.request == 0 or c.request == 2:
+        print('... removing old non flux content of ' + c.inputdir)
+        tobecleaned = UioFiles(c.inputdir,
+                               '*__*.' + str(os.getppid()) + '.*.grb')
+        tobecleaned.delete_files()
+
+    do_retrievement(c, server, start, end, datechunk, fluxes=False)
+
+    return
+
+def do_retrievement(c, server, start, end, delta_t, fluxes=False):
+    '''
+    @Description:
+        Divides the complete retrieval period into smaller chunks and
+        retrieves the data from MARS.
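The chunking described here is easiest to see on a concrete example; the dates and the chunk length below are made up.

# Sketch of how a retrieval period is split into chunks of date_chunk days.
import datetime

start = datetime.date(2016, 8, 1)
end = datetime.date(2016, 8, 8)
datechunk = datetime.timedelta(days=3)

day = start
while day <= end:
    last = min(day + datechunk - datetime.timedelta(days=1), end)
    print(day.strftime("%Y%m%d") + "/to/" + last.strftime("%Y%m%d"))
    day += datechunk
# prints 20160801/to/20160803, 20160804/to/20160806, 20160807/to/20160808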
+ + @Input: + c: instance of ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + server: instance of ECMWFService + The server connection to ECMWF + + start: instance of datetime + The start date of the retrieval. + + end: instance of datetime + The end date of the retrieval. + + delta_t: instance of datetime + Delta_t +1 is the maximal time period of a single + retrieval. + + fluxes: boolean, optional + Decides if the flux parameters are to be retrieved or + the rest of the parameter list. + Default value is False. + + @Return: + <nothing> + ''' + + # since actual day also counts as one day, + # we only need to add datechunk - 1 days to retrieval + # for a period + delta_t_m1 = delta_t - datetime.timedelta(days=1) + + day = start + while day <= end: + flexpart = EcFlexpart(c, fluxes) + tmpday = day + delta_t_m1 + if tmpday < end: + dates = day.strftime("%Y%m%d") + "/to/" + \ + tmpday.strftime("%Y%m%d") + else: + dates = day.strftime("%Y%m%d") + "/to/" + \ + end.strftime("%Y%m%d") + + + print("... retrieve " + dates + " in dir " + c.inputdir) + + try: + flexpart.retrieve(server, dates, c.request, c.inputdir) + except IOError: + my_error(c.mailfail, 'MARS request failed') + + day += delta_t + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/test_untar/python/install.py b/python/pythontest/TestInstallTar/test_untar/python/install.py new file mode 100755 index 0000000000000000000000000000000000000000..ba994276e57bb367c2252c83cb9819bb0bb9e1c5 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/install.py @@ -0,0 +1,546 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - moved install_args_and_control in here +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# Depending on the selected installation environment (locally or on the +# ECMWF server ecgate or cca) the program extracts the commandline +# arguments and the CONTROL file parameter and prepares the corresponding +# environment. The necessary files are collected in a tar-ball and placed +# at the target location. There its untared, the environment variables will +# be set and the Fortran code will be compiled. If the ECMWF environment is +# selected a job script is prepared and submitted for the remaining +# configurations after putting the tar-ball to the target ECMWF server. 
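The tar-ball hand-over described above is essentially a pack/extract round trip with Python's tarfile module; a reduced sketch with placeholder paths:

# Reduced pack/extract round trip as used for the installation tar-ball;
# all paths are placeholders.
import os
import tarfile

os.chdir('/path/to/flex_extract_v7.1')            # so the archive holds relative paths
with tarfile.open('flex_extract_v7.1.tar', 'w:gz') as tar:
    for name in ('python/install.py', 'src/Makefile.gfortran'):
        tar.add(name)

os.chdir('/path/to/target/flex_extract_v7.1')     # later, at the installation target
with tarfile.open('/path/to/flex_extract_v7.1/flex_extract_v7.1.tar') as tar:
    tar.extractall()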
+# +# @Program Content: +# - main +# - get_install_cmdline_arguments +# - install_via_gateway +# - mk_tarball +# - un_tarball +# - mk_env_vars +# - mk_compilejob +# - mk_job_template +# - delete_convert_build +# - make_convert_build +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import glob +import subprocess +import inspect +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +# software specific classes and modules from flex_extract +import _config +from ControlFile import ControlFile +from UioFiles import UioFiles +from tools import make_dir, put_file_to_ecserver, submit_job_to_ecserver + + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + Controls the installation process. Calls the installation function + if target is specified. + + @Intput: + <nothing> + + @Return: + <nothing> + ''' + + os.chdir(_config.PATH_LOCAL_PYTHON) + args = get_install_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + exit(1) + + c.assign_args_to_control(args) + c.check_install_conditions() + + install_via_gateway(c) + + return + +def get_install_cmdline_arguments(): + ''' + @Description: + Decomposes the command line arguments and assigns them to variables. + Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + ''' + parser = ArgumentParser(description='Install flex_extract software locally or \ + on ECMWF machines', + formatter_class=ArgumentDefaultsHelpFormatter) + + parser.add_argument('--target', dest='install_target', default=None, + help="Valid targets: local | ecgate | cca , \ + the latter two are at ECMWF") + parser.add_argument("--makefile", dest="makefile", default=None, + help='Name of Makefile to use for compiling CONVERT2') + parser.add_argument("--ecuid", dest="ecuid", default=None, + help='user id at ECMWF') + parser.add_argument("--ecgid", dest="ecgid", default=None, + help='group id at ECMWF') + parser.add_argument("--gateway", dest="gateway", default=None, + help='name of local gateway server') + parser.add_argument("--destination", dest="destination", default=None, + help='ecaccess destination, e.g. 
leo@genericSftp') + + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + default=None, help="FLEXPART root directory on ECMWF \ + servers (to find grib2flexpart and COMMAND file)\n\ + Normally flex_extract resides in the scripts directory \ + of the FLEXPART distribution, thus the:") + + # arguments for job submission to ECMWF, only needed by submit.py + parser.add_argument("--job_template", dest='job_template', + default="job.temp.o", + help="job template file for submission to ECMWF") + + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + + args = parser.parse_args() + + return args + + +def install_via_gateway(c): + ''' + @Description: + Perform the actual installation on local machine or prepare data + transfer to remote gate and submit a job script which will + install everything on the remote gate. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR + For more information about format and content of the parameter see + documentation. + + @Return: + <nothing> + ''' + import tarfile + + ecd = _config.PATH_FLEXEXTRACT_DIR + tarball_name = _config.FLEXEXTRACT_DIRNAME + '.tar' + tar_file = os.path.join(ecd + os.path.sep + tarball_name) + + target_dirname = _config.FLEXEXTRACT_DIRNAME + fortran_executable = _config.FORTRAN_EXECUTABLE + + if c.install_target.lower() != 'local': # ecgate or cca + + mk_compilejob(c.makefile, c.install_target, c.ecuid, c.ecgid, + c.flexpart_root_scripts) + + mk_job_template(c.ecuid, c.ecgid, c.gateway, + c.destination, c.flexpart_root_scripts) + + mk_env_vars(c.ecuid, c.ecgid, c.gateway, c.destination) + + mk_tarball(tar_file) + + put_file_to_ecserver(ecd, tarball_name, c.install_target, + c.ecuid, c.ecgid) + + submit_job_to_ecserver(c.install_target, + os.path.join(_config.PATH_JOBSCRIPTS + + os.path.sep + + _config.FILE_INSTALL_COMPILEJOB)) + + print('job compilation script has been submitted to ecgate for ' + + 'installation in ' + c.flexpart_root_scripts + + '/' + target_dirname) + print('You should get an email with subject "flexcompile" within ' + + 'the next few minutes!') + + else: #local + if not c.flexpart_root_scripts or c.flexpart_root_scripts == '../': + #install_dir = c.flexpart_root_scripts + print('WARNING: FLEXPART_ROOT_SCRIPTS has not been specified') + print('There will be only the compilation of the Fortran program' + + ' in ' + _config.PATH_FORTRAN_SRC) + os.chdir(_config.PATH_FORTRAN_SRC) + else: # creates the target working directory for flex_extract + c.flexpart_root_scripts = os.path.expandvars(os.path.expanduser( + c.flexpart_root_scripts)) + if os.path.abspath(ecd) != os.path.abspath(c.flexpart_root_scripts): + mk_tarball(tar_file) + make_dir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname)) + os.chdir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname)) + un_tarball(tar_file) + os.chdir(os.path.join(c.flexpart_root_scripts + os.path.sep + + target_dirname + os.path.sep + + _config.PATH_RELATIVE_FORTRAN_SRC)) + + # Create Fortran executable - CONVERT2 + print('Install ' + target_dirname + ' software at ' + + 
c.install_target + ' in directory ' + + os.path.abspath(c.flexpart_root_scripts) + '\n') + + delete_convert_build('.') + make_convert_build('.', c.makefile) + + os.chdir(ecd) + if os.path.isfile(tar_file): + os.remove(tar_file) + + return + +def mk_tarball(tarball_path): + ''' + @Description: + Creates a tarball with all necessary files which need to be sent to the + installation directory. + It does not matter if this is local or remote. + Collects all python files, the Fortran source and makefiles, + the ECMWF_ENV file, the CONTROL files as well as the + template files. + + @Input: + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. + + @Return: + <nothing> + ''' + import tarfile + from glob import glob + + print('Create tarball ...') + + # change to FLEXEXTRACT directory so that the tar can contain + # relative pathes to the files and directories + ecd = _config.PATH_FLEXEXTRACT_DIR + '/' + os.chdir(ecd) + + # get lists of the files to be added to the tar file + ECMWF_ENV_FILE = [os.path.join(_config.PATH_RELATIVE_PYTHON + + os.path.sep + _config.FILE_USER_ENVVARS)] + pyfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_LOCAL_PYTHON + + os.path.sep + '*py')] + controlfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_CONTROLFILES + + os.path.sep + 'CONTROL*')] + tempfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_TEMPLATES)] + ffiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.f*')] + hfiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + '*.h')] + makefiles = [os.path.relpath(x,ecd) + for x in glob(_config.PATH_FORTRAN_SRC + + os.path.sep + 'Makefile*')] + + # concatenate single lists to one for a better looping + filelist = pyfiles + controlfiles + tempfiles + ffiles + hfiles + \ + makefiles + ECMWF_ENV_FILE + + # create installation tar-file + try: + with tarfile.open(tarball_path, "w:gz") as tar_handle: + for file in filelist: + tar_handle.add(file) + + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + sys.exit('... could not make installation tar ball!') + + return + + +def un_tarball(tarball_path): + ''' + @Description: + Extracts the given tarball into current directory. + + @Input: + tarball_path: string + The complete path to the tar file which will contain all + relevant data for flex_extract. + + @Return: + <nothing> + ''' + import tarfile + + print('Untar ...') + + with tarfile.open(tarball_path) as tar_handle: + tar_handle.extractall() + + return + +def mk_env_vars(ecuid, ecgid, gateway, destination): + ''' + @Description: + Creates a file named ECMWF_ENV which contains the + necessary environmental variables at ECMWF servers. + + @Input: + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + gateway: string + The gateway server the user is using. + + destination: string + The remote destination which is used to transfer files + from ECMWF server to local gateway server. 
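The ECMWF_ENV file written by mk_env_vars (body just below) is a plain list of 'KEY value' lines for ECUID, ECGID, GATEWAY and DESTINATION. A sketch of reading such a file back into a dictionary; read_ecenv in tools.py, which is not part of this diff, presumably does something equivalent, and all values are placeholders.

# Parse an ECMWF_ENV file of the form
#   ECUID uid
#   ECGID gid
#   GATEWAY gateway.server.name
#   DESTINATION uid@genericSftp
env_params = {}
with open('ECMWF_ENV') as f:
    for line in f:
        if line.strip():
            key, value = line.split(None, 1)
            env_params[key.lower()] = value.strip()

print(env_params['ecuid'] + ' ' + env_params['destination'])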
+ + @Return: + <nothing> + ''' + + with open(os.path.join(_config.PATH_LOCAL_PYTHON + os.path.sep + + _config.FILE_USER_ENVVARS), 'w') as fo: + fo.write('ECUID ' + ecuid + '\n') + fo.write('ECGID ' + ecgid + '\n') + fo.write('GATEWAY ' + gateway + '\n') + fo.write('DESTINATION ' + destination + '\n') + + return + +def mk_compilejob(makefile, target, ecuid, ecgid, fp_root): + ''' + @Description: + Modifies the original job template file so that it is specified + for the user and the environment were it will be applied. Result + is stored in a new file "job.temp" in the python directory. + + @Input: + makefile: string + Name of the makefile which should be used to compile FORTRAN + CONVERT2 program. + + target: string + The target where the installation should be done, e.g. the queue. + + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + fp_root: string + Path to the root directory of FLEXPART environment or flex_extract + environment. + + @Return: + <nothing> + ''' + + template = os.path.join(_config.PATH_TEMPLATES + os.path.sep + + _config.TEMPFILE_INSTALL_COMPILEJOB) + with open(template) as f: + fdata = f.read().split('\n') + + compilejob = os.path.join(_config.PATH_JOBSCRIPTS + os.path.sep + + _config.FILE_INSTALL_COMPILEJOB) + with open(compilejob, 'w') as fo: + for data in fdata: + if 'MAKEFILE=' in data: + data = 'export MAKEFILE=' + makefile + elif 'FLEXPART_ROOT_SCRIPTS=' in data: + if fp_root != '../': + data = 'export FLEXPART_ROOT_SCRIPTS=' + fp_root + else: + data = 'export FLEXPART_ROOT_SCRIPTS=$HOME' + elif target.lower() != 'local': + if '--workdir' in data: + data = '#SBATCH --workdir=/scratch/ms/' + \ + ecgid + '/' + ecuid + elif '##PBS -o' in data: + data = '##PBS -o /scratch/ms/' + ecgid + '/' + ecuid + \ + 'flex_ecmwf.$Jobname.$Job_ID.out' + elif 'FLEXPART_ROOT_SCRIPTS=' in data: + if fp_root != '../': + data = 'export FLEXPART_ROOT_SCRIPTS=' + fp_root + else: + data = 'export FLEXPART_ROOT_SCRIPTS=$HOME' + fo.write(data + '\n') + + return + +def mk_job_template(ecuid, ecgid, gateway, destination, fp_root): + ''' + @Description: + Modifies the original job template file so that it is specified + for the user and the environment were it will be applied. Result + is stored in a new file "job.temp" in the python directory. + + @Input: + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + gateway: string + The gateway server the user is using. + + destination: string + The remote destination which is used to transfer files + from ECMWF server to local gateway server. + + fp_root: string + Path to the root directory of FLEXPART environment or flex_extract + environment. 
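Both job-script builders (mk_compilejob above and mk_job_template below) share the same pattern: read a template, overwrite the lines that contain known markers, write the result to a job script. A stripped-down sketch; file names and replacement values are examples.

# Stripped-down template substitution as used by mk_compilejob / mk_job_template.
replacements = {
    'MAKEFILE=': 'export MAKEFILE=Makefile.gfortran',
    '--workdir': '#SBATCH --workdir=/scratch/ms/at/uid',
}

with open('compilejob.temp') as f:
    lines = f.read().split('\n')

with open('compilejob.ksh', 'w') as fo:
    for line in lines:
        for marker, new_line in replacements.items():
            if marker in line:          # overwrite the whole marker line
                line = new_line
                break
        fo.write(line + '\n')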
+ + @Return: + <nothing> + ''' + ec_python_rel_path = _config.FLEXEXTRACT_DIRNAME + '/' + \ + _config.PATH_RELATIVE_PYTHON + + template = os.path.join(_config.PATH_TEMPLATES + os.path.sep + + _config.TEMPFILE_INSTALL_JOB) + with open(template) as f: + fdata = f.read().split('\n') + + jobfile_temp = os.path.join(_config.PATH_JOBSCRIPTS + os.path.sep + + _config.TEMPFILE_JOB) + with open(jobfile_temp, 'w') as fo: + for data in fdata: + if '--workdir' in data: + data = '#SBATCH --workdir=/scratch/ms/' + ecgid + \ + '/' + ecuid + elif '##PBS -o' in data: + data = '##PBS -o /scratch/ms/' + ecgid + '/' + \ + ecuid + 'flex_ecmwf.$Jobname.$Job_ID.out' + elif 'export PATH=${PATH}:' in data: + data += fp_root + '/' + ec_python_rel_path + + fo.write(data + '\n') + return + +def delete_convert_build(src_path): + ''' + @Description: + Clean up the Fortran source directory and remove all + build files (e.g. *.o, *.mod and CONVERT2) + + @Input: + src_path: string + Path to the fortran source directory. + + @Return: + <nothing> + ''' + + modfiles = UioFiles(src_path, '*.mod') + objfiles = UioFiles(src_path, '*.o') + exefile = UioFiles(src_path, _config.FORTRAN_EXECUTABLE) + + modfiles.delete_files() + objfiles.delete_files() + exefile.delete_files() + + return + +def make_convert_build(src_path, makefile): + ''' + @Description: + Compiles the Fortran code and generates the executable. + + @Input: + src_path: string + Path to the fortran source directory. + + makefile: string + The name of the makefile which should be used. + + @Return: + <nothing> + ''' + + try: + print('Using makefile: ' + makefile) + p = subprocess.Popen(['make', '-f', + os.path.join(src_path + os.path.sep + makefile)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + pout, perr = p.communicate() + print(pout) + if p.returncode != 0: + print(perr) + print('Please edit ' + makefile + + ' or try another Makefile in the src directory.') + print('Most likely GRIB_API_INCLUDE_DIR, GRIB_API_LIB ' + 'and EMOSLIB must be adapted.') + print('Available Makefiles:') + print(UioFiles(src_path, 'Makefile*')) + sys.exit('Compilation failed!') + except ValueError as e: + print('ERROR: Makefile call failed:') + print(e) + else: + subprocess.check_call(['ls', '-l', + os.path.join(src_path + os.path.sep + + _config.FORTRAN_EXECUTABLE)]) + + return + + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/test_untar/python/plot_retrieved.py b/python/pythontest/TestInstallTar/test_untar/python/plot_retrieved.py new file mode 100755 index 0000000000000000000000000000000000000000..45e7bb2e7783cfe0644b1fe2eb4c296d1bf75fe2 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/plot_retrieved.py @@ -0,0 +1,675 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: November 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - created function main and moved the two function calls for +# arguments and plotting into it +# - added function get_basics to extract the boundary conditions +# of the data fields from the first grib file it gets. +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
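For a local rebuild of the Fortran code, the two helpers defined in install.py above can also be used directly; a hedged usage sketch with example paths:

# Rebuild the CONVERT2 executable from the Fortran source directory;
# directory and makefile name are examples.
from install import delete_convert_build, make_convert_build

src_path = '../src'
delete_convert_build(src_path)                     # remove *.o, *.mod and CONVERT2
make_convert_build(src_path, 'Makefile.gfortran')  # run make -f Makefile.gfortran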
+# +# @Program Functionality: +# Simple tool for creating maps and time series of retrieved fields. +# +# @Program Content: +# - main +# - get_basics +# - get_files_per_date +# - plot_retrieved +# - plot_timeseries +# - plot_map +# - get_plot_args +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import time +import datetime +import os +import inspect +import sys +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter +import matplotlib +import matplotlib.pyplot as plt +from mpl_toolkits.basemap import Basemap +from eccodes import codes_grib_new_from_file, codes_get, codes_release, \ + codes_get_values +import numpy as np + +# software specific classes and modules from flex_extract +from ControlFile import ControlFile +from UioFiles import UioFiles + +# add path to pythonpath so that python finds its buddies +LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if LOCAL_PYTHON_PATH not in sys.path: + sys.path.append(LOCAL_PYTHON_PATH) + +font = {'family': 'monospace', 'size': 12} +matplotlib.rcParams['xtick.major.pad'] = '20' +matplotlib.rc('font', **font) +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + If plot_retrieved is called from command line, this function controls + the program flow and calls the argumentparser function and + the plot_retrieved function for plotting the retrieved GRIB data. + + @Input: + <nothing> + + @Return: + <nothing> + ''' + args, c = get_plot_args() + plot_retrieved(c) + + return + +def get_basics(ifile, verb=False): + """ + @Description: + An example grib file will be opened and basic information will + be extracted. These information are important for later use and the + initialization of numpy arrays for data storing. + + @Input: + ifile: string + Contains the full absolute path to the ECMWF grib file. + + verb (opt): bool + Is True if there should be extra output in verbose mode. + Default value is False. + + @Return: + data: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + """ + + data = {} + + # --- open file --- + print("Opening file for getting information data --- %s" % + os.path.basename(ifile)) + + with open(ifile) as f: + + # load first message from file + gid = codes_grib_new_from_file(f) + + # information needed from grib message + keys = ['Ni', + 'Nj', + 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees'] + + if verb: + print '\nInformations are: ' + for key in keys: + # Get the value of the key in a grib message. + data[key] = codes_get(gid, key) + if verb: + print "%s = %s" % (key, data[key]) + if verb: + print '\n' + + # Free the memory for the message referred as gribid. 
+ codes_release(gid) + + return data + +def get_files_per_date(files, datelist): + ''' + @Description: + The filenames contain dates which are used to select a list + of files for a specific time period specified in datelist. + + @Input: + files: instance of UioFiles + For description see class documentation. + It contains the attribute "files" which is a list of pathes + to filenames. + + datelist: list of datetimes + Contains the list of dates which should be processed for plotting. + + @Return: + filelist: list of strings + Contains the selected files for the time period. + ''' + + filelist = [] + for filename in files: + filedate = filename[-8:] + ddate = datetime.datetime.strptime(filedate, '%y%m%d%H') + if ddate in datelist: + filelist.append(filename) + + return filelist + +def plot_retrieved(c): + ''' + @Description: + Reads GRIB data from a specified time period, a list of levels + and a specified list of parameter. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + @Return: + <nothing> + ''' + start = datetime.datetime.strptime(c.start_date, '%Y%m%d%H') + end = datetime.datetime.strptime(c.end_date, '%Y%m%d%H') + + # create datelist between start and end date + datelist = [start] # initialise datelist with first date + run_date = start + while run_date < end: + run_date += datetime.timedelta(hours=int(c.dtime)) + datelist.append(run_date) + + print 'datelist: ', datelist + + c.paramIds = np.asarray(c.paramIds, dtype='int') + c.levels = np.asarray(c.levels, dtype='int') + c.area = np.asarray(c.area) + + files = UioFiles(c.inputdir, c.prefix+'*') + ifiles = get_files_per_date(files.files, datelist) + ifiles.sort() + + gdict = get_basics(ifiles[0], verb=False) + + fdict = dict() + fmeta = dict() + fstamp = dict() + for p in c.paramIds: + for l in c.levels: + key = '{:0>3}_{:0>3}'.format(p, l) + fdict[key] = [] + fmeta[key] = [] + fstamp[key] = [] + + for filename in ifiles: + f = open(filename) + print "Opening file for reading data --- %s" % filename + fdate = datetime.datetime.strptime(filename[-8:], "%y%m%d%H") + + # Load in memory a grib message from a file. 
+ gid = codes_grib_new_from_file(f) + while gid is not None: + gtype = codes_get(gid, 'type') + paramId = codes_get(gid, 'paramId') + parameterName = codes_get(gid, 'parameterName') + level = codes_get(gid, 'level') + + if paramId in c.paramIds and level in c.levels: + key = '{:0>3}_{:0>3}'.format(paramId, level) + print 'key: ', key + if fstamp[key]: + for i in range(len(fstamp[key])): + if fdate < fstamp[key][i]: + fstamp[key].insert(i, fdate) + fmeta[key].insert(i, [paramId, parameterName, gtype, + fdate, level]) + fdict[key].insert(i, np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + elif fdate > fstamp[key][i] and i == len(fstamp[key])-1: + fstamp[key].append(fdate) + fmeta[key].append([paramId, parameterName, gtype, + fdate, level]) + fdict[key].append(np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + elif fdate > fstamp[key][i] and i != len(fstamp[key])-1 \ + and fdate < fstamp[key][i+1]: + fstamp[key].insert(i, fdate) + fmeta[key].insert(i, [paramId, parameterName, gtype, + fdate, level]) + fdict[key].insert(i, np.flipud(np.reshape( + codes_get_values(gid), + [gdict['Nj'], gdict['Ni']]))) + break + else: + pass + else: + fstamp[key].append(fdate) + fmeta[key].append((paramId, parameterName, gtype, + fdate, level)) + fdict[key].append(np.flipud(np.reshape( + codes_get_values(gid), [gdict['Nj'], gdict['Ni']]))) + + codes_release(gid) + + # Load in memory a grib message from a file. + gid = codes_grib_new_from_file(f) + + f.close() + + for k in fdict.iterkeys(): + print 'fmeta: ', len(fmeta), fmeta + fml = fmeta[k] + fdl = fdict[k] + print 'fm1: ', len(fml), fml + for fd, fm in zip(fdl, fml): + print fm + ftitle = fm[1] + ' {} '.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + pname = '_'.join(fm[1].split()) + '_{}_'.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + plot_map(c, fd, fm, gdict, ftitle, pname, 'png') + + for k in fdict.iterkeys(): + fml = fmeta[k] + fdl = fdict[k] + fsl = fstamp[k] + if fdl: + fm = fml[0] + fd = fdl[0] + ftitle = fm[1] + ' {} '.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + pname = '_'.join(fm[1].split()) + '_{}_'.format(fm[-1]) + \ + datetime.datetime.strftime(fm[3], '%Y%m%d%H') + lat = -20. + lon = 20. + plot_timeseries(c, fdl, fml, fsl, lat, lon, gdict, + ftitle, pname, 'png') + + return + +def plot_timeseries(c, flist, fmetalist, ftimestamps, lat, lon, + gdict, ftitle, filename, fending, show=False): + ''' + @Description: + Creates a timeseries plot for a given lat/lon position. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + flist: numpy array, 2d + The actual data values to be plotted from the grib messages. + + fmetalist: list of strings + Contains some meta date for the data field to be plotted: + parameter id, parameter Name, grid type, datetime, level + + ftimestamps: list of datetime + Contains the time stamps. + + lat: float + The latitude for which the timeseries should be plotted. 
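The GRIB reading above follows the standard ecCodes iteration pattern: request messages until None is returned, query keys, release the handle. Reduced to its core, with a placeholder file name:

# Core ecCodes reading loop as used in plot_retrieved.
from eccodes import (codes_grib_new_from_file, codes_get,
                     codes_get_values, codes_release)

with open('EN16080900', 'rb') as f:
    while True:
        gid = codes_grib_new_from_file(f)
        if gid is None:
            break
        print('paramId %s, level %s, %d values' %
              (codes_get(gid, 'paramId'), codes_get(gid, 'level'),
               codes_get_values(gid).size))
        codes_release(gid)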
+ + lon: float + The longitude for which the timeseries should be plotted. + + gdict: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + + ftitle: string + The title of the timeseries. + + filename: string + The time series is stored in a file with this name. + + fending: string + Contains the type of plot, e.g. pdf or png + + show: boolean + Decides if the plot is shown after plotting or hidden. + + @Return: + <nothing> + ''' + print 'plotting timeseries' + + t1 = time.time() + + #llx = gdict['longitudeOfFirstGridPointInDegrees'] + #if llx > 180. : + # llx -= 360. + #lly = gdict['latitudeOfLastGridPointInDegrees'] + #dxout = gdict['iDirectionIncrementInDegrees'] + #dyout = gdict['jDirectionIncrementInDegrees'] + #urx = gdict['longitudeOfLastGridPointInDegrees'] + #ury = gdict['latitudeOfFirstGridPointInDegrees'] + #numxgrid = gdict['Ni'] + #numygrid = gdict['Nj'] + + farr = np.asarray(flist) + #(time, lat, lon) + + #lonindex = linspace(llx, urx, numxgrid) + #latindex = linspace(lly, ury, numygrid) + + ts = farr[:, 0, 0] + + fig = plt.figure(figsize=(12, 6.7)) + + plt.plot(ftimestamps, ts) + plt.title(ftitle) + + plt.savefig(c.outputdir + '/' + filename + '_TS.' + fending, + facecolor=fig.get_facecolor(), + edgecolor='none', + format=fending) + print 'created ', c.outputdir + '/' + filename + if show: + plt.show() + fig.clf() + plt.close(fig) + + print time.time() - t1, 's' + + return + +def plot_map(c, flist, fmetalist, gdict, ftitle, filename, fending, show=False): + ''' + @Description: + Creates a basemap plot with imshow for a given data field. + + @Input: + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + + flist: numpy array, 2d + The actual data values to be plotted from the grib messages. + + fmetalist: list of strings + Contains some meta date for the data field to be plotted: + parameter id, parameter Name, grid type, datetime, level + + gdict: dict + Contains basic informations of the ECMWF grib files, e.g. + 'Ni', 'Nj', 'latitudeOfFirstGridPointInDegrees', + 'longitudeOfFirstGridPointInDegrees', + 'latitudeOfLastGridPointInDegrees', + 'longitudeOfLastGridPointInDegrees', + 'jDirectionIncrementInDegrees', + 'iDirectionIncrementInDegrees' + + ftitle: string + The titel of the plot. + + filename: string + The plot is stored in a file with this name. + + fending: string + Contains the type of plot, e.g. pdf or png + + show: boolean + Decides if the plot is shown after plotting or hidden. + + @Return: + <nothing> + ''' + print 'plotting map' + + t1 = time.time() + + fig = plt.figure(figsize=(12, 6.7)) + #mbaxes = fig.add_axes([0.05, 0.15, 0.8, 0.7]) + + llx = gdict['longitudeOfFirstGridPointInDegrees'] #- 360 + if llx > 180.: + llx -= 360. 
+ lly = gdict['latitudeOfLastGridPointInDegrees'] + #dxout = gdict['iDirectionIncrementInDegrees'] + #dyout = gdict['jDirectionIncrementInDegrees'] + urx = gdict['longitudeOfLastGridPointInDegrees'] + ury = gdict['latitudeOfFirstGridPointInDegrees'] + #numxgrid = gdict['Ni'] + #numygrid = gdict['Nj'] + + m = Basemap(projection='cyl', llcrnrlon=llx, llcrnrlat=lly, + urcrnrlon=urx, urcrnrlat=ury, resolution='i') + + #lw = 0.5 + m.drawmapboundary() + #x = linspace(llx, urx, numxgrid) + #y = linspace(lly, ury, numygrid) + + #xx, yy = m(*meshgrid(x, y)) + + #s = m.contourf(xx, yy, flist) + + s = plt.imshow(flist.T, + extent=(llx, urx, lly, ury), + alpha=1.0, + interpolation='nearest' + #vmin=vn, + #vmax=vx, + #cmap=my_cmap, + #levels=levels, + #cmap=my_cmap, + #norm=LogNorm(vn,vx) + ) + + plt.title(ftitle, y=1.08) + cb = m.colorbar(s, location="right", pad="10%") + cb.set_label('label', size=14) + + thickline = np.arange(lly, ury+1, 10.) + thinline = np.arange(lly, ury+1, 5.) + m.drawparallels(thickline, + color='gray', + dashes=[1, 1], + linewidth=0.5, + labels=[1, 1, 1, 1], + xoffset=1.) + m.drawparallels(np.setdiff1d(thinline, thickline), + color='lightgray', + dashes=[1, 1], + linewidth=0.5, + labels=[0, 0, 0, 0]) + + thickline = np.arange(llx, urx+1, 10.) + thinline = np.arange(llx, urx+1, 5.) + m.drawmeridians(thickline, + color='gray', + dashes=[1, 1], + linewidth=0.5, + labels=[1, 1, 1, 1], + yoffset=1.) + m.drawmeridians(np.setdiff1d(thinline, thickline), + color='lightgray', + dashes=[1, 1], + linewidth=0.5, + labels=[0, 0, 0, 0]) + + m.drawcoastlines() + m.drawcountries() + + plt.savefig(c.outputdir + '/' + filename + '_MAP.' + fending, + facecolor=fig.get_facecolor(), + edgecolor='none', + format=fending) + print 'created ', c.outputdir + '/' + filename + if show: + plt.show() + fig.clf() + plt.close(fig) + + print time.time() - t1, 's' + + return + +def get_plot_args(): + ''' + @Description: + Assigns the command line arguments and reads CONTROL file + content. Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + + c: instance of class ControlFile + Contains all necessary information of a CONTROL file. The parameters + are: DAY1, DAY2, DTIME, MAXSTEP, TYPE, TIME, STEP, CLASS, STREAM, + NUMBER, EXPVER, GRID, LEFT, LOWER, UPPER, RIGHT, LEVEL, LEVELIST, + RESOL, GAUSS, ACCURACY, OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, + SMOOTH, FORMAT, ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, + ECFSDIR, MAILOPS, MAILFAIL, GRIB2FLEXPART, DEBUG, INPUTDIR, + OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + For more information about format and content of the parameter see + documentation. + ''' + parser = ArgumentParser(description='Plot retrieved GRIB data from ' + \ + 'ECMWF MARS archive', + formatter_class=ArgumentDefaultsHelpFormatter) + +# the most important arguments + parser.add_argument("--start_date", dest="start_date", + help="start date YYYYMMDD") + parser.add_argument("--end_date", dest="end_date", + help="end_date YYYYMMDD") + + parser.add_argument("--start_step", dest="start_step", + help="start step in hours") + parser.add_argument("--end_step", dest="end_step", + help="end step in hours") + +# some arguments that override the default in the CONTROL file + parser.add_argument("--levelist", dest="levelist", + help="vertical levels to be retrieved, e.g. 
30/to/60") + parser.add_argument("--area", dest="area", + help="area defined as north/west/south/east") + parser.add_argument("--paramIds", dest="paramIds", + help="parameter IDs") + parser.add_argument("--prefix", dest="prefix", default='EN', + help="output file name prefix") + +# set the working directories + parser.add_argument("--inputdir", dest="inputdir", default=None, + help="root directory for storing intermediate files") + parser.add_argument("--outputdir", dest="outputdir", default=None, + help="root directory for storing output files") + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + help="FLEXPART root directory (to find \ + 'grib2flexpart and COMMAND file)\n \ + Normally flex_extract resides in the scripts directory \ + of the FLEXPART distribution") + + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + args = parser.parse_args() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + exit(1) + + if args.levelist: + c.levels = args.levelist.split('/') + else: + c.levels = [0] + + if args.area: + c.area = args.area.split('/') + else: + c.area = '[0,0]' + + c.paramIds = args.paramIds.split('/') + + if args.start_step: + c.start_step = int(args.start_step) + else: + c.start_step = 0 + + if args.end_step: + c.end_step = int(args.end_step) + else: + c.end_step = 0 + + c.start_date = args.start_date + c.end_date = args.end_date + + c.prefix = args.prefix + + c.inputdir = args.inputdir + + if args.outputdir: + c.outputdir = args.outputdir + else: + c.outputdir = c.inputdir + + return args, c + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/test_untar/python/prepare_flexpart.py b/python/pythontest/TestInstallTar/test_untar/python/prepare_flexpart.py new file mode 100755 index 0000000000000000000000000000000000000000..088c2a0fd49e2cc9452d622640b8b0c137b47f07 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/prepare_flexpart.py @@ -0,0 +1,206 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - using the WebAPI also for general MARS retrievals +# - job submission on ecgate and cca +# - job templates suitable for twice daily operational dissemination +# - dividing retrievals of longer periods into digestable chunks +# - retrieve also longer term forecasts, not only analyses and +# short term forecast data +# - conversion into GRIB2 +# - conversion into .fp format for faster execution of FLEXPART +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - minor changes in programming style for consistence +# - BUG: removed call of clean_up-Function after call of +# prepareFlexpart in main since it is already called in +# prepareFlexpart at the end! +# - created function main and moved the two function calls for +# arguments and prepare_flexpart into it +# +# @License: +# (C) Copyright 2014-2018. 
+# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program prepares the final version of the grib files which are +# then used by FLEXPART. It converts the bunch of grib files extracted +# via get_mars_data by doing for example the necessary conversion to get +# consistent grids or the disaggregation of flux data. Finally, the +# program combines the data fields in files per available hour with the +# naming convention xxYYMMDDHH, where xx should be 2 arbitrary letters +# (mostly xx is chosen to be "EN"). +# +# @Program Content: +# - main +# - prepare_flexpart +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import datetime +import os +import inspect +import sys +import socket +import _config + +# software specific classes and modules from flex_extract +from UioFiles import UioFiles +from tools import clean_up, get_cmdline_arguments, read_ecenv +from EcFlexpart import EcFlexpart + +ecapi = 'ecmwf' not in socket.gethostname() +try: + if ecapi: + import ecmwfapi +except ImportError: + ecapi = False + +# add path to pythonpath so that python finds its buddies +LOCAL_PYTHON_PATH = os.path.dirname(os.path.abspath( + inspect.getfile(inspect.currentframe()))) +if LOCAL_PYTHON_PATH not in sys.path: + sys.path.append(LOCAL_PYTHON_PATH) + + +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def main(): + ''' + @Description: + If prepare_flexpart is called from command line, this function controls + the program flow and calls the argumentparser function and + the prepare_flexpart function for preparation of GRIB data for FLEXPART. + + @Input: + <nothing> + + @Return: + <nothing> + ''' + + args = get_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(LOCAL_PYTHON_PATH + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + c.assign_args_to_control(args, env_parameter) + c.assign_envs_to_control(env_parameter) + c.check_conditions() + prepare_flexpart(args.ppid, c) + + return + +def prepare_flexpart(ppid, c): + ''' + @Description: + Lists all grib files retrieved from MARS with get_mars_data and + uses prepares data for the use in FLEXPART. Specific data fields + are converted to a different grid and the flux data are going to be + disaggregated. The data fields are collected by hour and stored in + a file with a specific FLEXPART relevant naming convention. + + @Input: + ppid: int + Contains the ppid number of the current ECMWF job. If it is called + from this script, it is "None". 
+ + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + if not ppid: + c.ppid = str(os.getppid()) + else: + c.ppid = ppid + + c.ecapi = ecapi + + # create the start and end date + start = datetime.date(year=int(c.start_date[:4]), + month=int(c.start_date[4:6]), + day=int(c.start_date[6:])) + + end = datetime.date(year=int(c.end_date[:4]), + month=int(c.end_date[4:6]), + day=int(c.end_date[6:])) + + # assign starting date minus 1 day + # since for basetime 00 we need the 12 hours upfront + # (the day before from 12 UTC to current day 00 UTC) + if c.basetime == '00': + start = start - datetime.timedelta(days=1) + + print 'Prepare ' + start.strftime("%Y%m%d") + \ + "/to/" + end.strftime("%Y%m%d") + + # create output dir if necessary + if not os.path.exists(c.outputdir): + os.makedirs(c.outputdir) + + # get all files with flux data to be deaccumulated + inputfiles = UioFiles(c.inputdir, '*OG_acc_SL*.' + c.ppid + '.*') + + # deaccumulate the flux data + flexpart = EcFlexpart(c, fluxes=True) + flexpart.write_namelist(c, 'fort.4') + flexpart.deacc_fluxes(inputfiles, c) + + # get a list of all files from the root inputdir + inputfiles = UioFiles(c.inputdir, '????__??.*' + c.ppid + '.*') + + # produce FLEXPART-ready GRIB files and process them - + # copy/transfer/interpolate them or make them GRIB2 + flexpart = EcFlexpart(c, fluxes=False) + flexpart.create(inputfiles, c) + flexpart.process_output(c) + + # check if in debugging mode, then store all files + # otherwise delete temporary files + if int(c.debug) != 0: + print '\nTemporary files left intact' + else: + clean_up(c) + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/test_untar/python/profiling.py b/python/pythontest/TestInstallTar/test_untar/python/profiling.py new file mode 100644 index 0000000000000000000000000000000000000000..4511af2aca3a41265a9dd035b11430e84626ac62 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/profiling.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#************************************************************************ +# ToDo AP +# - check license of book content +#************************************************************************ +#******************************************************************************* +# +# @Author: Anne Philipp (University of Vienna) +# +# @Date: March 2018 +# +# @License: +# (C) Copyright 2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program functionality: +# This module is not part of flex_extract. It is just used for testing and +# performance analysis of some functions. 
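+#
+#    Minimal usage sketch (illustration only, assuming this file is
+#    importable as "profiling"):
+#
+#        from profiling import timefn
+#
+#        @timefn
+#        def expensive(n):
+#            return sum(i * i for i in range(n))
+#
+#        expensive(10**6)  # prints "@timefn:expensive took ... seconds"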
+# +# @Program Content: +# - timefn +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +from functools import wraps +import time + +# ------------------------------------------------------------------------------ +# FUNCTION +# ------------------------------------------------------------------------------ +def timefn(fn): + ''' + @Description: + Decorator function. It takes the inner function as an argument. + ''' + @wraps(fn) + def measure_time(*args, **kwargs): + ''' + @Descripton: + Passes the arguments through fn for execution. Around the + execution of fn the time is captured to execute the fn function + and prints the result along with the function name. + + This is taken from the book "High Performance Python" from + Micha Gorelick and Ian Ozsvald, O'Reilly publisher, 2014, + ISBN: 978-1-449-36159-4 + + @Input: + *args: undefined + A variable number of positional arguments. + + **kwargs: undefined + A variable number of key/value arguments. + + @Return: + <nothing> + ''' + + t1 = time.time() + result = fn(*args, **kwargs) + t2 = time.time() + print "@timefn:" + fn.func_name + " took " + str(t2 - t1) + " seconds" + + return result + + return measure_time diff --git a/python/pythontest/TestInstallTar/test_untar/python/submit.py b/python/pythontest/TestInstallTar/test_untar/python/submit.py new file mode 100755 index 0000000000000000000000000000000000000000..967ed945810218d72a7e3b53215c279ca40824d2 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/submit.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Fouilloux (University of Oslo) +# +# @Date: October 2014 +# +# @Change History: +# +# November 2015 - Leopold Haimberger (University of Vienna): +# - job submission on ecgate and cca +# - job templates suitable for twice daily operational dissemination +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - minor changes in programming style (for consistence) +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This program is the main program of flex_extract and controls the +# program flow. +# If it is supposed to work locally then it works through the necessary +# functions get_mars_data and prepareFlexpart. Otherwise it prepares +# a shell job script which will do the necessary work on the +# ECMWF server and is submitted via ecaccess-job-submit. 
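+#
+#    Typical invocations (illustrative sketch; the file names are the
+#    defaults and examples shipped with this distribution):
+#
+#        python submit.py --controlfile=CONTROL.test
+#        python submit.py --controlfile=CONTROL.test --queue=ecgate
+#
+#    The first call retrieves and prepares the data locally; the second
+#    builds a job script from the "job.temp" template and submits it to
+#    the ECMWF queue via ecaccess-job-submit.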
+# +# @Program Content: +# - main +# - submit +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import subprocess +import inspect +import collections + +# software specific classes and modules from flex_extract +import _config +from tools import normal_exit, get_cmdline_arguments, submit_job_to_ecserver, \ + read_ecenv +from get_mars_data import get_mars_data +from prepare_flexpart import prepare_flexpart +from ControlFile import ControlFile + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ + +def main(): + ''' + @Description: + Get the arguments from script call and from CONTROL file. + Decides from the argument "queue" if the local version + is done "queue=None" or the gateway version with "queue=ecgate" + or "queue=cca". + + @Input: + <nothing> + + @Return: + <nothing> + ''' + + called_from_dir = os.getcwd() + + args = get_cmdline_arguments() + + try: + c = ControlFile(args.controlfile) + except IOError: + try: + c = ControlFile(_config.PATH_LOCAL_PYTHON + args.controlfile) + except IOError: + print 'Could not read CONTROL file "' + args.controlfile + '"' + print 'Either it does not exist or its syntax is wrong.' + print 'Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information' + sys.exit(1) + + env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + c.assign_args_to_control(args) + c.assign_envs_to_control(env_parameter) + c.check_conditions() + + # on local side + # on ECMWF server this would also be the local side + if args.queue is None: + if c.inputdir[0] != '/': + c.inputdir = os.path.join(called_from_dir, c.inputdir) + if c.outputdir[0] != '/': + c.outputdir = os.path.join(called_from_dir, c.outputdir) + get_mars_data(c) + if c.request == 0 or c.request == 2: + prepare_flexpart(args.ppid, c) + normal_exit(c.mailfail, 'FLEX_EXTRACT IS DONE!') + else: + normal_exit(c.mailfail, 'PRINTING MARS_REQUESTS DONE!') + # on ECMWF server + else: + submit(args.job_template, c, args.queue) + + return + +def submit(jtemplate, c, queue): + ''' + @Description: + Prepares the job script and submit it to the specified queue. + + @Input: + jtemplate: string + Job template file for submission to ECMWF. It contains all necessary + module and variable settings for the ECMWF environment as well as + the job call and mail report instructions. + Default is "job.temp". + + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + queue: string + Name of queue for submission to ECMWF (e.g. 
ecgate or cca ) + + @Return: + <nothing> + ''' + + # read template file and get index for CONTROL input + with open(jtemplate) as f: + lftext = f.read().split('\n') + insert_point = lftext.index('EOF') + + if not c.basetime: + # --------- create on demand job script ------------------------------------ + if c.maxstep > 24: + print '---- Pure forecast mode! ----' + else: + print '---- On-demand mode! ----' + job_file = jtemplate[:-4] + 'ksh' + clist = c.to_list() + + lftextondemand = lftext[:insert_point] + clist + lftext[insert_point:] + + with open(job_file, 'w') as f: + f.write('\n'.join(lftextondemand)) + + result_code = submit_job_to_ecserver(queue, job_file) + + else: + # --------- create operational job script ---------------------------------- + print '---- Operational mode! ----' + job_file = jtemplate[:-5] + 'oper.ksh' + #colist = [] + + if c.maxstep: + mt = int(c.maxstep) + else: + mt = 0 + + c.start_date = '${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY}' + c.end_date = '${MSJ_YEAR}${MSJ_MONTH}${MSJ_DAY}' + c.base_time = '${MSJ_BASETIME}' + if mt > 24: + c.time = '${MSJ_BASETIME} {MSJ_BASETIME}' + + colist = c.to_list() + + lftextoper = lftext[:insert_point] + colist + lftext[insert_point + 2:] + + with open(job_file, 'w') as f: + f.write('\n'.join(lftextoper)) + + result_code = submit_job_to_ecserver(queue, job_file) + + # -------------------------------------------------------------------------- + print 'You should get an email with subject flex.hostname.pid' + + return + +if __name__ == "__main__": + main() diff --git a/python/pythontest/TestInstallTar/test_untar/python/test_suite.py b/python/pythontest/TestInstallTar/test_untar/python/test_suite.py new file mode 100755 index 0000000000000000000000000000000000000000..6cd9ed7cfb41cd7faf5b1f5487524535216a889d --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/test_suite.py @@ -0,0 +1,104 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Leopold Haimberger (University of Vienna) +# +# @Date: December 2015 +# +# @Change History: +# +# February 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# +# @License: +# (C) Copyright 2015-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. +# +# @Program Functionality: +# This script triggers the flex_extract test suite. Call with +# test_suite.py [test group] +# +# @Program Content: +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import sys +import json +import subprocess + +# ------------------------------------------------------------------------------ +# PROGRAM +# ------------------------------------------------------------------------------ +try: + taskfile = open('test_suite.json') +except IOError: + print 'could not open suite definition file test_suite.json' + exit() + +if not os.path.isfile('../src/CONVERT2'): + print '../src/CONVERT2 could not be found' + print 'please run "install.py --target=local" first' + exit() + +fprs = os.getenv('FLEXPART_ROOT_SCRIPTS') +if fprs is None: + print 'FLEXPART_ROOT_SCRIPTS not set .. 
some test jobs may fail' + +tasks = json.load(taskfile, encoding='latin-1') +taskfile.close() +if not os.path.exists('../test'): + os.makedirs('../test') +if len(sys.argv) > 1: + groups = sys.argv[1:] +else: + groups = ['xinstall', 'default', 'ops', 'work', 'cv', 'fc']#,'hires'] +jobcounter = 0 +jobfailed = 0 +for g in groups: + try: + tk, tv = g, tasks[g] + finally: + pass + garglist = [] + for ttk, ttv in tv.iteritems(): + if isinstance(ttv, basestring): + if ttk != 'script': + garglist.append('--' + ttk) + if ttv[0] == '$': + garglist.append(os.path.expandvars(ttv)) + else: + garglist.append(ttv) + for ttk, ttv in tv.iteritems(): + if isinstance(ttv, dict): + arglist = [] + for tttk, tttv in ttv.iteritems(): + if isinstance(tttv, basestring): + arglist.append('--' + tttk) + if '$' in tttv[0]: + arglist.append(os.path.expandvars(tttv)) + else: + arglist.append(tttv) + print 'Command: ', ' '.join([tv['script']] + garglist + arglist) + o = '../test/' + tk + '_' + ttk + '_' + '_'.join(ttv.keys()) + print 'Output will be sent to ', o + f = open(o, 'w') + try: + p = subprocess.check_call([tv['script']] + garglist + arglist, + stdout=f, stderr=f) + except subprocess.CalledProcessError as e: + f.write('\nFAILED\n') + print 'FAILED' + jobfailed += 1 + jobcounter += 1 + f.close() + +print 'Test suite tasks completed' +print str(jobcounter-jobfailed) + ' successful, ' + str(jobfailed) + ' failed' +print 'If tasks have been submitted via ECACCESS please check emails' diff --git a/python/pythontest/TestInstallTar/test_untar/python/testecmwfapi.py b/python/pythontest/TestInstallTar/test_untar/python/testecmwfapi.py new file mode 100644 index 0000000000000000000000000000000000000000..90e49ee2e76f81a61dbdc25c1983c5d942b0f58b --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/testecmwfapi.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +from ecmwfapi import ECMWFDataServer + +server = ECMWFDataServer() + +server.retrieve({ + 'dataset' : "interim", + 'time' : "00", + 'date' : "2013-09-01/to/2013-09-30", + 'step' : "0", + 'type' : "an", + 'levtype' : "sfc", + 'param' : "165.128/41.128", + 'grid' : "0.75/0.75", + 'target' : "interim201309.grib" +}) diff --git a/python/pythontest/TestInstallTar/test_untar/python/tools.py b/python/pythontest/TestInstallTar/test_untar/python/tools.py new file mode 100644 index 0000000000000000000000000000000000000000..8e1e405d3730febfcb3e473222fe332b0e104edd --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/python/tools.py @@ -0,0 +1,531 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +#******************************************************************************* +# @Author: Anne Philipp (University of Vienna) +# +# @Date: May 2018 +# +# @Change History: +# October 2014 - Anne Fouilloux (University of Oslo) +# - created functions silent_remove and product (taken from ECMWF) +# +# November 2015 - Leopold Haimberger (University of Vienna) +# - created functions: interpret_args_and_control, clean_up +# my_error, normal_exit, init128, to_param_id +# +# April 2018 - Anne Philipp (University of Vienna): +# - applied PEP8 style guide +# - added documentation +# - moved all functions from file Flexparttools to this file tools +# - added function get_list_as_string +# - seperated args and control interpretation +# +# @License: +# (C) Copyright 2014-2018. +# +# This software is licensed under the terms of the Apache Licence Version 2.0 +# which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. 
+# +# @Modul Description: +# This module contains a couple of helpful functions which are +# used in different places in flex_extract. +# +# @Module Content: +# - get_cmdline_arguments +# - clean_up +# - my_error +# - normal_exit +# - product +# - silent_remove +# - init128 +# - to_param_id +# - get_list_as_string +# - make_dir +# +#******************************************************************************* + +# ------------------------------------------------------------------------------ +# MODULES +# ------------------------------------------------------------------------------ +import os +import errno +import sys +import glob +import subprocess +import traceback +from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter + +# ------------------------------------------------------------------------------ +# FUNCTIONS +# ------------------------------------------------------------------------------ + +def get_cmdline_arguments(): + ''' + @Description: + Decomposes the command line arguments and assigns them to variables. + Apply default values for non mentioned arguments. + + @Input: + <nothing> + + @Return: + args: instance of ArgumentParser + Contains the commandline arguments from script/program call. + ''' + + parser = ArgumentParser(description='Retrieve FLEXPART input from \ + ECMWF MARS archive', + formatter_class=ArgumentDefaultsHelpFormatter) + + # the most important arguments + parser.add_argument("--start_date", dest="start_date", default=None, + help="start date YYYYMMDD") + parser.add_argument("--end_date", dest="end_date", default=None, + help="end_date YYYYMMDD") + parser.add_argument("--date_chunk", dest="date_chunk", default=None, + help="# of days to be retrieved at once") + + # some arguments that override the default in the CONTROL file + parser.add_argument("--basetime", dest="basetime", default=None, + help="base such as 00/12 (for half day retrievals)") + parser.add_argument("--step", dest="step", default=None, + help="steps such as 00/to/48") + parser.add_argument("--levelist", dest="levelist", default=None, + help="Vertical levels to be retrieved, e.g. 30/to/60") + parser.add_argument("--area", dest="area", default=None, + help="area defined as north/west/south/east") + + # set the working directories + parser.add_argument("--inputdir", dest="inputdir", default=None, + help="root directory for storing intermediate files") + parser.add_argument("--outputdir", dest="outputdir", default=None, + help="root directory for storing output files") + parser.add_argument("--flexpart_root_scripts", dest="flexpart_root_scripts", + default=None, + help="FLEXPART root directory (to find grib2flexpart \ + and COMMAND file)\n Normally flex_extract resides in \ + the scripts directory of the FLEXPART distribution") + + # this is only used by prepare_flexpart.py to rerun a postprocessing step + parser.add_argument("--ppid", dest="ppid", default=None, + help="specify parent process id for \ + rerun of prepare_flexpart") + + # arguments for job submission to ECMWF, only needed by submit.py + parser.add_argument("--job_template", dest='job_template', + default="job.temp", + help="job template file for submission to ECMWF") + parser.add_argument("--queue", dest="queue", default=None, + help="queue for submission to ECMWF \ + (e.g. 
ecgate or cca )") + parser.add_argument("--controlfile", dest="controlfile", + default='CONTROL.temp', + help="file with CONTROL parameters") + parser.add_argument("--debug", dest="debug", default=None, + help="debug mode - leave temporary files intact") + parser.add_argument("--request", dest="request", default=None, + help="list all mars request in file mars_requests.dat \ + and skip submission to mars") + + args = parser.parse_args() + + return args + +def read_ecenv(filename): + ''' + @Description: + Reads the file into a dictionary where the key values are the parameter + names. + + @Input: + filename: string + Name of file where the ECMWV environment parameters are stored. + + @Return: + envs: dict + ''' + envs= {} + print filename + with open(filename, 'r') as f: + for line in f: + data = line.strip().split() + envs[str(data[0])] = str(data[1]) + + return envs + +def clean_up(c): + ''' + @Description: + Remove all files from intermediate directory + (inputdir from CONTROL file). + + @Input: + c: instance of class ControlFile + Contains all the parameters of CONTROL file, which are e.g.: + DAY1(start_date), DAY2(end_date), DTIME, MAXSTEP, TYPE, TIME, + STEP, CLASS(marsclass), STREAM, NUMBER, EXPVER, GRID, LEFT, + LOWER, UPPER, RIGHT, LEVEL, LEVELIST, RESOL, GAUSS, ACCURACY, + OMEGA, OMEGADIFF, ETA, ETADIFF, DPDETA, SMOOTH, FORMAT, + ADDPAR, WRF, CWC, PREFIX, ECSTORAGE, ECTRANS, ECFSDIR, + MAILOPS, MAILFAIL, GRIB2FLEXPART, FLEXPARTDIR, BASETIME + DATE_CHUNK, DEBUG, INPUTDIR, OUTPUTDIR, FLEXPART_ROOT_SCRIPTS + + For more information about format and content of the parameter + see documentation. + + @Return: + <nothing> + ''' + + print "clean_up" + + cleanlist = glob.glob(c.inputdir + "/*") + for clist in cleanlist: + if c.prefix not in clist: + silent_remove(clist) + if c.ecapi is False and (c.ectrans == '1' or c.ecstorage == '1'): + silent_remove(clist) + + print "Done" + + return + + +def my_error(users, message='ERROR'): + ''' + @Description: + Prints a specified error message which can be passed to the function + before exiting the program. + + @Input: + user: list of strings + Contains all email addresses which should be notified. + It might also contain just the ecmwf user name which wil trigger + mailing to the associated email address for this user. + + message: string, optional + Error message. Default value is "ERROR". + + @Return: + <nothing> + ''' + + print message + + # comment if user does not want email notification directly from python + for user in users: + if '${USER}' in user: + user = os.getenv('USER') + try: + p = subprocess.Popen(['mail', '-s flex_extract_v7.1 ERROR', + os.path.expandvars(user)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + trace = '\n'.join(traceback.format_stack()) + pout = p.communicate(input=message + '\n\n' + trace)[0] + except ValueError as e: + print 'ERROR: ', e + sys.exit('Email could not be sent!') + else: + print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ + pout.decode() + + sys.exit(1) + + return + + +def normal_exit(users, message='Done!'): + ''' + @Description: + Prints a specific exit message which can be passed to the function. + + @Input: + user: list of strings + Contains all email addresses which should be notified. + It might also contain just the ecmwf user name which wil trigger + mailing to the associated email address for this user. + + message: string, optional + Message for exiting program. Default value is "Done!". 
+ + @Return: + <nothing> + + ''' + print message + + # comment if user does not want notification directly from python + for user in users: + if '${USER}' in user: + user = os.getenv('USER') + try: + p = subprocess.Popen(['mail', '-s flex_extract_v7.1 normal exit', + os.path.expandvars(user)], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + bufsize=1) + pout = p.communicate(input=message+'\n\n')[0] + except ValueError as e: + print 'ERROR: ', e + print 'Email could not be sent!' + else: + print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ + pout.decode() + + return + + +def product(*args, **kwds): + ''' + @Description: + This method is taken from an example at the ECMWF wiki website. + https://software.ecmwf.int/wiki/display/GRIB/index.py; 2018-03-16 + + This method combines the single characters of the passed arguments + with each other. So that each character of each argument value + will be combined with each character of the other arguments as a tuple. + + Example: + product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy + product(range(2), repeat = 3) --> 000 001 010 011 100 101 110 111 + + @Input: + *args: tuple + Positional arguments (arbitrary number). + + **kwds: dictionary + Contains all the keyword arguments from *args. + + @Return: + prod: tuple + Return will be done with "yield". A tuple of combined arguments. + See example in description above. + ''' + pools = map(tuple, args) * kwds.get('repeat', 1) + result = [[]] + for pool in pools: + result = [x + [y] for x in result for y in pool] + for prod in result: + yield tuple(prod) + + return + + +def silent_remove(filename): + ''' + @Description: + Remove file if it exists. + The function does not fail if the file does not exist. + + @Input: + filename: string + The name of the file to be removed without notification. + + @Return: + <nothing> + ''' + try: + os.remove(filename) + except OSError as e: + if e.errno != errno.ENOENT: + # errno.ENOENT = no such file or directory + raise # re-raise exception if a different error occured + + return + + +def init128(filepath): + ''' + @Description: + Opens and reads the grib file with table 128 information. + + @Input: + filepath: string + Path to file of ECMWF grib table number 128. + + @Return: + table128: dictionary + Contains the ECMWF grib table 128 information. + The key is the parameter number and the value is the + short name of the parameter. + ''' + table128 = dict() + with open(filepath) as f: + fdata = f.read().split('\n') + for data in fdata: + if data[0] != '!': + table128[data[0:3]] = data[59:64].strip() + + return table128 + + +def to_param_id(pars, table): + ''' + @Description: + Transform parameter names to parameter ids + with ECMWF grib table 128. + + @Input: + pars: string + Addpar argument from CONTROL file in the format of + parameter names instead of ids. The parameter short + names are sepearted with "/" and they are passed as + one single string. + + table: dictionary + Contains the ECMWF grib table 128 information. + The key is the parameter number and the value is the + short name of the parameter. + + @Return: + ipar: list of integer + List of addpar parameters from CONTROL file transformed to + parameter ids in the format of integer. 
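+
+        Example (illustrative; assumes the standard table 128 entries
+        where "T" maps to 130 and "SP" maps to 134):
+            to_param_id('T/SP', table128)  ->  [130, 134]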
+ ''' + cpar = pars.upper().split('/') + ipar = [] + for par in cpar: + for k, v in table.iteritems(): + if par == k or par == v: + ipar.append(int(k)) + break + else: + print 'Warning: par ' + par + ' not found in table 128' + + return ipar + +def get_list_as_string(list_obj, concatenate_sign=', '): + ''' + @Description: + Converts a list of arbitrary content into a single string. + + @Input: + list_obj: list + A list with arbitrary content. + + concatenate_sign: string, optional + A string which is used to concatenate the single + list elements. Default value is ", ". + + @Return: + str_of_list: string + The content of the list as a single string. + ''' + + str_of_list = concatenate_sign.join(str(l) for l in list_obj) + + return str_of_list + +def make_dir(directory): + ''' + @Description: + Creates a directory and gives a warning if the directory + already exists. The program stops only if there is another problem. + + @Input: + directory: string + The directory name including the path which should be created. + + @Return: + <nothing> + ''' + try: + os.makedirs(directory) + except OSError as e: + if e.errno != errno.EEXIST: + # errno.EEXIST = directory already exists + raise # re-raise exception if a different error occured + else: + print 'WARNING: Directory {0} already exists!'.format(directory) + + return + +def put_file_to_ecserver(ecd, filename, target, ecuid, ecgid): + ''' + @Description: + Uses the ecaccess-file-put command to send a file to the ECMWF servers. + + NOTE: + The return value is just for testing reasons. It does not have + to be used from the calling function since the whole error handling + is done in here. + + @Input: + ecd: string + The path were the file is stored. + + filename: string + The name of the file to send to the ECMWF server. + + target: string + The target queue where the file should be sent to. + + ecuid: string + The user id on ECMWF server. + + ecgid: string + The group id on ECMWF server. + + @Return: + rcode: string + Resulting code of command execution. If successful the string + will be empty. + ''' + + try: + rcode = subprocess.check_output(['ecaccess-file-put', + ecd + '/' + filename, + target + ':/home/ms/' + + ecgid + '/' + ecuid + + '/' + filename], + stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + print('\n... Do you have a valid ecaccess certification key?') + sys.exit('... ECACCESS-FILE-PUT FAILED!') + + return rcode + +def submit_job_to_ecserver(target, jobname): + ''' + @Description: + Uses ecaccess-job-submit command to submit a job to the ECMWF server. + + NOTE: + The return value is just for testing reasons. It does not have + to be used from the calling function since the whole error handling + is done in here. + + @Input: + target: string + The target where the file should be sent to, e.g. the queue. + + jobname: string + The name of the jobfile to be submitted to the ECMWF server. + + @Return: + rcode: string + Resulting code of command execution. If successful the string + will contain an integer number, representing the id of the job + at the ecmwf server. + ''' + + try: + rcode = subprocess.check_output(['ecaccess-job-submit', + '-queueName', target, + jobname]) + except subprocess.CalledProcessError as e: + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + + + print('\n... Do you have a valid ecaccess certification key?') + sys.exit('... 
ECACCESS-JOB-SUBMIT FAILED!') + + return rcode diff --git a/python/pythontest/TestInstallTar/test_untar/run/control/CONTROL.test b/python/pythontest/TestInstallTar/test_untar/run/control/CONTROL.test new file mode 100644 index 0000000000000000000000000000000000000000..b693ee4d1364bc38deb58739b3c8844f4ee5490c --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/run/control/CONTROL.test @@ -0,0 +1,18 @@ +DTIME 3 +TYPE AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC +TIME 00 00 00 00 00 00 06 00 00 00 00 00 12 12 12 12 12 12 18 12 12 12 12 12 +STEP 00 01 02 03 04 05 00 07 08 09 10 11 00 01 02 03 04 05 00 07 08 09 10 11 +CLASS EI +STREAM OPER +EXPVER 1 +GRID 5000 +LEFT -10000 +LOWER 30000 +UPPER 40000 +RIGHT 10000 +LEVELIST 59/to/60 +RESOL 63 +GAUSS 1 +ADDPAR 186/187/188/235/139/39 +PREFIX EItest_ +ECTRANS 1 diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.CRAY b/python/pythontest/TestInstallTar/test_untar/src/Makefile.CRAY new file mode 100644 index 0000000000000000000000000000000000000000..6ed57be95245136e040e65c5964a2ef4f93d48f9 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.CRAY @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/gcc-4.9.3/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +#EMOSLIB=-lemosR64 + +OPT = +DEBUG = -g +LIB = $(GRIB_API_LIBS) $(EMOSLIB) + +FC=ftn $(F90FLAGS) +F90C=ftn $(F90FLAGS) + +FFLAGS = $(OPT) -I. -r8 -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -r8 -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.gfortran b/python/pythontest/TestInstallTar/test_untar/src/Makefile.gfortran new file mode 100644 index 0000000000000000000000000000000000000000..58923fa5bf9713717ca12a4f420f9fb5ead4d6f0 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.gfortran @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/gcc-4.9.3/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +#EMOSLIB=-lemosR64 + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIB) $(EMOSLIB) + +FC=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian +F90C=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
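+
+# Note (illustrative): the GRIB_API_INCLUDE_DIR, GRIB_API_LIB and EMOSLIB
+# variables commented out above are expected to come from the build
+# environment (e.g. ECMWF modules) or the make command line, e.g.
+#
+#     make -f Makefile.gfortran GRIB_API_INCLUDE_DIR=<path> \
+#          GRIB_API_LIB="<grib_api libraries>" EMOSLIB=-lemosR64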
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.ifort b/python/pythontest/TestInstallTar/test_untar/src/Makefile.ifort new file mode 100644 index 0000000000000000000000000000000000000000..038a1689708cdf956c1f3a89621d3cad58654414 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.ifort @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +#GRIB_API_INCLUDE_DIR=/usr/local/ifort/grib1.12.3//include +#GRIB_API_LIB=-openmp -L/usr/local/ifort/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIBS) -lemosR64 -lgfortran + +FC=ifort -132 -traceback -r8 +F90C=ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.gfortran b/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.gfortran new file mode 100644 index 0000000000000000000000000000000000000000..3847d57ad9e6c0b92d823eda01fb6f83e2221d33 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.gfortran @@ -0,0 +1,62 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/usr/local/gcc-4.9.3/grib_api-1.14.3//include +GRIB_API_LIB= -L/usr/local/gcc-4.9.3/grib_api-1.14.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper +EMOSLIB=-lemosR64 + +OPT = -g -O3 -fopenmp +DEBUG = -g +LIB = $(GRIB_API_LIB) $(EMOSLIB) + +FC=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian +F90C=gfortran -m64 -fdefault-real-8 -fcray-pointer -fno-second-underscore -ffixed-line-length-132 -fopenmp -fconvert=big-endian + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.ifort b/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.ifort new file mode 100644 index 0000000000000000000000000000000000000000..6f58a3532815bb2766f1bcd5fb2677958eaae355 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.local.ifort @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/home/srvx1/tmc/TestEnv/Libraries/eccodes-2.6.0_ifort/include +GRIB_API_LIB= -L/home/srvx1/tmc/TestEnv/Libraries/eccodes-2.6.0_ifort/lib -Bstatic -leccodes_f90 -leccodes -Bdynamic -lm -ljasper + +OPT = -g -O3 -mcmodel=medium -unroll -inline -heap-arrays 32 +DEBUG = -g +LIB = $(GRIB_API_LIB) -lemosR64 -lgfortran + +FC=/opt/intel/bin/ifort -132 -traceback -r8 +F90C=/opt/intel/bin/ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . 
+ +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/Makefile.new b/python/pythontest/TestInstallTar/test_untar/src/Makefile.new new file mode 100644 index 0000000000000000000000000000000000000000..9953d130c88dbb3a07e54867ef5c700c12df799f --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/Makefile.new @@ -0,0 +1,61 @@ +############################################################################### +# +# Top level Makefile for ECMWFDATA7.0 software +# +# Last modified: December 1, 2015 +# +############################################################################### + + +.SUFFIXES: .o .c .c~ .f .f~ .F90 .f90 .f90~ .f95 .f95~ .F .F~ .y .y~ .l .l~ \ + .s .s~ .sh .sh~ .h .h~ .C .C~ .a + + +GRIB_API_INCLUDE_DIR=/usr/local/ifort/grib1.12.3//include +GRIB_API_LIBS=-openmp -L/usr/local/ifort/grib1.12.3/lib -Bstatic -lgrib_api_f77 -lgrib_api_f90 -lgrib_api -Bdynamic -lm -ljasper + +OPT = -g +DEBUG = -g +LIB = $(GRIB_API_LIBS) -lemosR64 + +FC=ifort -132 -traceback -r8 +F90C=ifort -132 -traceback -r8 + +FFLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) +F90FLAGS = $(OPT) -I. -I$(GRIB_API_INCLUDE_DIR) + +LDFLAGS = $(OPT) + +BINDIR = . + +EXE = CONVERT2 + + +.f.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f +.f90.o: + $(F90C) -c $(F90FLAGS) -132 $(DEBUG) $*.f90 + +all: ${EXE} + +clean: + rm *.o + +phgrreal.o: phgrreal.f + $(F90C) -c -g -O3 -fopenmp phgrreal.f + +grphreal.o: grphreal.f + $(F90C) -c -g -O3 -fopenmp grphreal.f + +ftrafo.o: ftrafo.f + $(F90C) -c -g -O3 -fopenmp ftrafo.f + +$(BINDIR)/CONVERT2: phgrreal.o grphreal.o ftrafo.o rwGRIB2.o posnam.o preconvert.o + $(F90C) $(DEBUG) $(OPT) -o $(BINDIR)/CONVERT2 ftrafo.o phgrreal.o grphreal.o rwGRIB2.o posnam.o preconvert.o ${LIB} + + +############################################################################### +# +# End of the Makefile +# +############################################################################### diff --git a/python/pythontest/TestInstallTar/test_untar/src/ftrafo.f b/python/pythontest/TestInstallTar/test_untar/src/ftrafo.f new file mode 100644 index 0000000000000000000000000000000000000000..affdccdcf8b2a439fbd5fd35435ac076eefe7ee5 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/ftrafo.f @@ -0,0 +1,504 @@ + MODULE FTRAFO + + CONTAINS + + + +C +C Implementierung der spektralen Transformationsmethode unter Verwendung +C des reduzierten Gauss'schen Gitters +C +C Berechnung der scale winds aus Vorticity und Divergenz +C uebergibt man in XMN die Divergenz, so wird der divergente Anteil des +C Windes (XPHI=Ud,XPHI=Vd) zurueckgegeben, uebergibt man die Vorticity, so +C erhaelt man den rotationellen Wind (XLAM=Vrot,XPHI=-Urot). 
+C Summiert man beide, erhaelt man den gesamten Scale wind +C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte pro Flaeche +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE VDTOUV(XMN,XLAM,XPHI,GWSAVE,IFAX,P,MLAT,MNAUF,NI,NJ,NK) + + + USE PHTOGR + + IMPLICIT NONE + INTEGER J,N,NI,NJ,NK,MNAUF,GGIND(NJ/2) + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI + REAL RT,IT + + GGIND(1)=0 + DO 4 J = 2,NJ/2 + GGIND(J)=GGIND(J-1)+MLAT(J-1) +4 CONTINUE +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 5 J = 1,NJ/2 + CALL VDUVSUB(J,XMN,XLAM,XPHI,GWSAVE,IFAX,P,GGIND(J), + *MLAT,MNAUF,NI,NJ,NK) + 5 CONTINUE +!$OMP END PARALLEL DO + RETURN + END SUBROUTINE VDTOUV + + SUBROUTINE VDUVSUB(J,XMN,XLAM,XPHI,GWSAVE,IFAX,P, + *GGIND,MLAT,MNAUF,NI,NJ,NK) + + USE PHTOGR + + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL ERAD,SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI + REAL FAC(0:MNAUF),RT,IT + + + ERAD = 6367470.D0 + + FAC(0)=0.D0 + DO 12 N=1,MNAUF + FAC(N)=-ERAD/DBLE(N)/DBLE(N+1) +12 CONTINUE + + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MLAT(J)) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K)*FAC(N) + IT=XMN(2*LL+1,K)*FAC(N) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+ RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K)*FAC(N) + IT=XMN(2*LL+1,K)*FAC(N) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI) + UFOUC(2*M+1)=M*(SCR-ACR) + VFOUC(2*M)=-M*(SCI+ACI) + VFOUC(2*M+1)=M*(SCR+ACR) + + MUFOUC(2*M)=-(MUSCR-MUACR) + MUFOUC(2*M+1)=-(MUSCI-MUACI) + MVFOUC(2*M)=-(MUSCR+MUACR) + MVFOUC(2*M+1)=-(MUSCI+MUACI) + 2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(GGIND+1:GGIND+MLAT(J),K)=VFOUC(0:MLAT(J)-1) + CALL RFOURTR(UFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=UFOUC(0:MLAT(J)-1) + + CALL RFOURTR(MVFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(GGIND+1:GGIND+MLAT(J),K)=MVFOUC(0:MLAT(J)-1) + CALL RFOURTR(MUFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=MUFOUC(0:MLAT(J)-1) + +3 CONTINUE + + RETURN + END SUBROUTINE VDUVSUB + +C Berechnung des Gradienten eines Skalars aus dem Feld des +C Skalars XMN im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. 
+C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE PHGRAD(XMN,XLAM,XPHI,GWSAVE,IFAX,P,H,MLAT, + *MNAUF,NI,NJ,NK) + + USE PHTOGR + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MLAT(NJ),IFAX(10,NJ) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ/2) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NK),XPHI(NI,NK) + REAL GWSAVE(8*NJ+15,NJ/2) + REAL ERAD + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI,RT,IT + + ERAD = 6367470.0 + + GGIND=0 + DO 4 J = 1,NJ/2 + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MLAT(J)) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI)/ERAD + UFOUC(2*M+1)=M*(SCR-ACR)/ERAD + VFOUC(2*M)=-M*(SCI+ACI)/ERAD + VFOUC(2*M+1)=M*(SCR+ACR)/ERAD + + MUFOUC(2*M)=-(MUSCR-MUACR)/ERAD + MUFOUC(2*M+1)=-(MUSCI-MUACI)/ERAD + MVFOUC(2*M)=-(MUSCR+MUACR)/ERAD + MVFOUC(2*M+1)=-(MUSCI+MUACI)/ERAD +2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(GGIND+1:GGIND+MLAT(J),K)=VFOUC(0:MLAT(J)-1) + CALL RFOURTR(UFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XLAM(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=UFOUC(0:MLAT(J)-1) + + CALL RFOURTR(MVFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(GGIND+1:GGIND+MLAT(J),K)=MVFOUC(0:MLAT(J)-1) + CALL RFOURTR(MUFOUC, + *GWSAVE(:,J),IFAX(:,J),MNAUF,MLAT(J),1) + XPHI(NI-GGIND-MLAT(J)+1:NI-GGIND,K)=MUFOUC(0:MLAT(J)-1) + +3 CONTINUE + GGIND=GGIND+MLAT(J) +4 CONTINUE + + + RETURN + END SUBROUTINE PHGRAD + +C Berechnung des Gradienten eines Skalars aus dem Feld des +C Skalars XMN im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. 
+C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus + + SUBROUTINE PHGRACUT(XMN,XLAM,XPHI,GWSAVE,IFAX,P,H,MAUF, + *MNAUF,NI,NJ,MANF,NK) + + USE PHTOGR + IMPLICIT NONE + INTEGER J,K,M,N,NI,NJ,NK,MNAUF,GGIND,LL,LLP,LLH,LLS,LLPS,LLHS + INTEGER MAUF,MANF,I,IFAX(10) + REAL UFOUC(0:MAXAUF),MUFOUC(0:MAXAUF) + REAL VFOUC(0:MAXAUF),MVFOUC(0:MAXAUF) + REAL XMN(0:(MNAUF+1)*(MNAUF+2)-1,NK) + REAL P(0:(MNAUF+3)*(MNAUF+4)/2,NJ) + REAL H(0:(MNAUF+2)*(MNAUF+3)/2) + REAL XLAM(NI,NJ,NK),XPHI(NI,NJ,NK) + REAL HLAM(MAXAUF,2),HPHI(MAXAUF,2) + REAL GWSAVE(4*MAUF+15) + REAL ERAD + REAL SCR,SCI,ACR,ACI,MUSCR,MUSCI,MUACR,MUACI,RT,IT + + ERAD = 6367470.0 + + GGIND=0 + DO 4 J = 1,NJ + CALL DPLGND(MNAUF,P(0,J),H) + DO 3 K = 1,NK + LL=0 + LLP=0 + LLH=0 + DO 2 M = 0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + MUSCR=0.D0 + MUSCI=0.D0 + MUACR=0.D0 + MUACI=0.D0 + LLS=LL + LLPS=LLP + LLHS=LLH + IF(2*M+1.LT.MAUF) THEN + DO 1 N = M,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + SCR =SCR+ RT*P(LLP,J) + SCI =SCI+ IT*P(LLP,J) + MUACR =MUACR+RT*H(LLH) + MUACI =MUACI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 1 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + LLH=LLHS+1 + DO 11 N = M+1,MNAUF,2 + RT=XMN(2*LL,K) + IT=XMN(2*LL+1,K) + ACR =ACR+ RT*P(LLP,J) + ACI =ACI+ IT*P(LLP,J) + MUSCR =MUSCR+ RT*H(LLH) + MUSCI =MUSCI+ IT*H(LLH) + LL=LL+2 + LLP=LLP+2 + LLH=LLH+2 + 11 CONTINUE + ENDIF + LL=LLS+(MNAUF-M+1) + LLP=LLPS+(MNAUF-M+3) + LLH=LLHS+(MNAUF-M+2) + + UFOUC(2*M)=-M*(SCI-ACI)/ERAD + UFOUC(2*M+1)=M*(SCR-ACR)/ERAD + VFOUC(2*M)=-M*(SCI+ACI)/ERAD + VFOUC(2*M+1)=M*(SCR+ACR)/ERAD + + MUFOUC(2*M)=-(MUSCR-MUACR)/ERAD + MUFOUC(2*M+1)=-(MUSCI-MUACI)/ERAD + MVFOUC(2*M)=-(MUSCR+MUACR)/ERAD + MVFOUC(2*M+1)=-(MUSCI+MUACI)/ERAD +2 CONTINUE + + CALL RFOURTR(VFOUC, + *GWSAVE,IFAX,MNAUF,MAUF,1) + + CALL RFOURTR(MVFOUC, + *GWSAVE,IFAX,MNAUF,MAUF,1) + + DO 6 I=0,NI-1 + IF(MANF+I.LE. MAUF) THEN + XLAM(I+1,J,K)=VFOUC(MANF+I-1) + XPHI(I+1,J,K)=MVFOUC(MANF+I-1) + ELSE + XLAM(I+1,J,K)=VFOUC(MANF-MAUF+I-1) + XPHI(I+1,J,K)=MVFOUC(MANF-MAUF+I-1) + ENDIF + 6 CONTINUE +3 CONTINUE + GGIND=GGIND+MAUF +4 CONTINUE + + RETURN + END SUBROUTINE PHGRACUT + +C Berechnung der Divergenz aus dem Windfeld (U,V) +C im Phasenraum. Zurueckgegeben werden die Felder der +C Komponenten des horizontalen Gradienten XLAM,XPHI auf dem Gauss'schen Gitter. +C GWSAVE ist ein Hilfsfeld fuer die FFT +C P enthaelt die assoziierten Legendrepolynome, H deren Ableitung +C MLAT enthaelt die Anzahl der Gitterpunkte pro Breitenkreis +C MNAUF gibt die spektrale Aufloesung an, +C NI = Anzahl der Gauss'schen Gitterpunkte, +C NJ = Anzahl der Gauss'schen Breiten, +C NK = Anzahl der Niveaus +C Beachte, dass das Windfeld eine um 1 erhoehte Aufloesung in mu-Richtung hat. + + SUBROUTINE CONTGL(PS,DPSDL,DPSDM,DIV,U,V,BREITE,ETA, + *MLAT,A,B,NI,NJ,NK) + + IMPLICIT NONE + + INTEGER NI,NJ,NK,I,J,K,MLAT(NJ),L + + REAL A(NK+1),B(NK+1) + REAL PS(NI),DPSDL(NI),DPSDM(NI) + REAL DIV(NI,NK),U(NI,NK),V(NI,NK),ETA(NI,NK) + REAL BREITE(NJ) + + REAL DIVT1,DIVT2,POB,PUN,DPSDT,COSB + + L=0 + DO 4 J=1,NJ + COSB=(1.0-BREITE(J)*BREITE(J)) + DO 3 I=1,MLAT(J) + L=L+1 + DIVT1=0.0 + DIVT2=0.0 + DO 1 K=1,NK + POB=A(K)+B(K)*PS(L) + PUN=A(K+1)+B(K+1)*PS(L) + + DIVT1=DIVT1+DIV(L,K)*(PUN-POB) + if(cosb .gt. 0.) 
then + DIVT2=DIVT2+(B(K+1)-B(K))*PS(L)* + *(U(L,K)*DPSDL(L)+V(L,K)*DPSDM(L))/COSB + endif + + ETA(L,K)=-DIVT1-DIVT2 +1 CONTINUE + + DPSDT=(-DIVT1-DIVT2)/PS(L) + + DO 2 K=1,NK + ETA(L,K)=ETA(L,K)-DPSDT*B(K+1)*PS(L) +2 CONTINUE + PS(L)=DPSDT*PS(L) +3 CONTINUE +4 CONTINUE + RETURN + END SUBROUTINE CONTGL + +C OMEGA berechnet omega im Hybridkoordinatensystem +C PS ist der Bodendruck, +C DPSDL,DPSDM sind die Komponenten des Gradienten des Logarithmus des +C Bodendrucks +C DIV,U,V sind die horizontale Divergenz und das horizontale Windfeld +C BREITE ist das Feld der Gauss'schen Breiten +C E ist omega, + + SUBROUTINE OMEGA(PS,DPSDL,DPSDM,DIV,U,V,BREITE,E,MLAT,A,B,NGI + * ,NGJ,MKK) + + IMPLICIT NONE + + INTEGER I,J,K,L,NGI,NGJ,MKK,MLAT(NGJ) + + REAL PS(NGI),DPSDL(NGI),DPSDM(NGI),A(MKK+1),B(MKK+1) + REAL DIV(NGI,MKK),U(NGI,MKK),V(NGI,MKK),E(NGI,MKK) + REAL BREITE(NGJ) + + REAL DIVT1,DIVT2,POB,PUN,DP,X,Y,COSB + REAL DIVT3(MKK+2) + + L=0 + DO 4 J=1,NGJ + COSB=(1.0-BREITE(J)*BREITE(J)) + DO 3 I=1,MLAT(J) + L=L+1 + DIVT1=0.0 + DIVT2=0.0 + DIVT3(1)=0.0 + DO 1 K=1,MKK + POB=A(K)+B(K)*PS(L) + PUN=A(K+1)+B(K+1)*PS(L) + DP=PUN-POB + + Y=PS(L)*(U(L,K)*DPSDL(L)+V(L,K)*DPSDM(L))/COSB + IF(K.LT.3) THEN + X=0.0 + ELSE + X=(B(K+1)-B(K))*Y + ENDIF + + DIVT1=DIVT1+DIV(L,K)*DP + DIVT2=DIVT2+X + + DIVT3(K+1)=-DIVT1-DIVT2 + + IF(K.GT.1) THEN + E(L,K) = 0.5*(POB+PUN)/DP*Y* + *((B(K+1)-B(K))+(A(K+1)*B(K)-A(K)*B(K+1))/ + *DP*LOG(PUN/POB)) + ELSE + E(L,K) = 0.0 + ENDIF + + E(L,K) = E(L,K)+0.5*(DIVT3(K)+DIVT3(K+1)) + +1 CONTINUE +3 CONTINUE +4 CONTINUE + RETURN + END SUBROUTINE OMEGA + + END MODULE FTRAFO diff --git a/python/pythontest/TestInstallTar/test_untar/src/grphreal.f b/python/pythontest/TestInstallTar/test_untar/src/grphreal.f new file mode 100644 index 0000000000000000000000000000000000000000..dae342bf336d149d97ffa55234c0b1375eff2661 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/grphreal.f @@ -0,0 +1,188 @@ + MODULE GRTOPH + + USE PHTOGR + + CONTAINS +C + SUBROUTINE GRPH213(CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C CXM = FOURIERKOEFFIZIENTEN - nur ein Hilfsfeld +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + + +C Anzahl der Gitterpunkte pro Breitenkreis des reduzierten +C Gauss'schen Gitters + INTEGER MLAT(MAXB),ISIZE,IFAX(10,MAXB) + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL*8 Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + +C LOGICAL*1 USED(((216*217)/2+1)*160) + + DIMENSION CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + DIMENSION WSAVE(8*MAXB+15,MAXB/2) + REAL*8 W(MAXB) + DIMENSION IND(MAXB) + + + IND(1)=0 + DO 6 J=2,MAXB/2 + IND(j)=IND(J-1)+MLAT(J-1) + 6 CONTINUE +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 16 L=1,MLEVEL + CALL GRPHSUB(L,IND,CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) +16 CONTINUE +!$omp end parallel do + + + RETURN + END SUBROUTINE GRPH213 +C + SUBROUTINE GRPHSUB(L,IND,CXMN,FELD,WSAVE,IFAX,Z,W,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE TRANSFORMATION EINER +C 
FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C CXM = FOURIERKOEFFIZIENTEN - nur ein Hilfsfeld +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + REAL CXMS(4*(MNAUF+1)) + REAL CXMA(4*(MNAUF+1)) + REAL,ALLOCATABLE :: CXM(:,:) + +C Anzahl der Gitterpunkte pro Breitenkreis des reduzierten +C Gauss'schen Gitters + INTEGER MLAT(MAXB),ISIZE + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + +C LOGICAL*1 USED(((216*217)/2+1)*160) + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + INTEGER IFAX(10,MAXB) + REAL W(MAXB) + INTEGER IND(MAXB) + + ALLOCATE(CXM( 4*MAXB,MAXB)) + DO 5 J=1,MAXB/2 + CXMS(1:MLAT(J))=FELD(IND(J)+1:IND(J)+MLAT(J),L) + CALL RFOUFTR(CXMS,WSAVE(1,J),IFAX(:,J),MNAUF,MLAT(J),1) + CXMA(1:MLAT(J))=FELD(MAXL-IND(J)-MLAT(J)+1:MAXL-IND(J),L) + CALL RFOUFTR(CXMA, + *WSAVE(1,J),IFAX(:,J),MNAUF,MLAT(J),1) + DO 4 I=1,2*(MNAUF+1) + CXM(I,J)=CXMS(I)+CXMA(I) + CXM(I,MAXB+1-J)=CXMS(I)-CXMA(I) +4 CONTINUE + 5 CONTINUE + CALL LGTR213(CXMN(0,L),CXM,Z,W,MLAT,MNAUF,MAXB) + + DEALLOCATE(CXM) + + RETURN + END SUBROUTINE GRPHSUB +C + SUBROUTINE LGTR213(CXMN,CXM,Z,W,MLAT,MNAUF,MAXB) + IMPLICIT REAL (A-H,O-Z) + INTEGER MLAT(MAXB) + DIMENSION CXM(0:4*MAXB-1,MAXB) + DIMENSION CXMN(0:2*(((MNAUF+1)*MNAUF)/2+MNAUF)+1) + REAL*8 Z(MAXB/2,0:((MNAUF+3)*(MNAUF+4))/2) + REAL*8 W(MAXB),CR,CI,HILF + LOGICAL EVEN +C +C DIESE ROUTINE BERECHNET DIE KFFKs CXMN +C + LL=0 + LLP=0 + DO 1 I=0,MNAUF + KM=0 + 9 KM=KM+1 + IF(MLAT(KM).LE.2*I) THEN + GOTO 9 + ENDIF + DO 2 J=I,MNAUF + CR=0 + CI=0 + EVEN=MOD(I+J,2).EQ.0 + IF(EVEN) THEN + DO 3 K=KM,MAXB/2 + HILF=W(K)*Z(K,LLP) + CR=CR+CXM(2*I,K)*HILF + CI=CI+CXM(2*I+1,K)*HILF + 3 CONTINUE + ELSE + DO 4 K=KM,MAXB/2 + HILF=W(K)*Z(K,LLP) + CR=CR+CXM(2*I,MAXB+1-K)*HILF + CI=CI+CXM(2*I+1,MAXB+1-K)*HILF + 4 CONTINUE + ENDIF + 5 CXMN(2*LL)=CR + CXMN(2*LL+1)=CI + LL=LL+1 + LLP=LLP+1 + 2 CONTINUE + LLP=LLP+2 + 1 CONTINUE + RETURN + END SUBROUTINE LGTR213 +C + +C + SUBROUTINE RFOUFTR(CXM,TRIGS,IFAX,MNAUF,MAXL,ISIGN) +C BERECHNET DIE FOURIERSUMME MIT EINEM FFT-ALGORITHMUS + IMPLICIT REAL (A-H,O-Z) + DIMENSION CXM(0:2*MAXL-1) + DIMENSION FELD(MAXL),TRIGS(2*MAXL) + DIMENSION WSAVE(MAXAUF) + INTEGER IFAX(10) + + +C NORMIERUNG... + WSAVE(1)=CXM(MAXL-1) + + CXM(1:MAXL)=CXM(0:MAXL-1)/2 + CXM(0)=WSAVE(1)/2 +! 
CALL CFFTF(MAXL,CXM,WSAVE) + CALL FFT99(CXM,WSAVE,TRIGS,IFAX,1,1,MAXL,1,-1) + RETURN + END SUBROUTINE RFOUFTR + + END MODULE GRTOPH diff --git a/python/pythontest/TestInstallTar/test_untar/src/jparams.h b/python/pythontest/TestInstallTar/test_untar/src/jparams.h new file mode 100644 index 0000000000000000000000000000000000000000..146a7f05c10fdeaadb611686c4c95f7fdc2f7a28 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/jparams.h @@ -0,0 +1,34 @@ +C +C Parameters +C + INTEGER JP32, JPLONO, J2NFFT, JPFFT, JPLOOK, JPMAX, JPMAXITER + INTEGER JPMXTRY, JPTRNC, JPK, JPTRP1 + PARAMETER ( JP32 = 32 ) +C +C The following value for JPLONO (2560) will handle regular grids +C from N1 to N720 derived from spectral truncations from T1 to +C T639. +C +Cjdc PARAMETER ( JPLONO = 2560 , J2NFFT = 2 + JPLONO, JPFFT = 12000) + PARAMETER ( JPLONO = 6000 , J2NFFT = 2 + JPLONO, JPFFT = 12000) + PARAMETER ( JPLOOK = 50) + PARAMETER ( JPMAX = 2048 ) + PARAMETER ( JPMAXITER = 10) + PARAMETER ( JPMXTRY = 3 ) + PARAMETER ( JPTRNC = 2047, JPK = (JPTRNC + 1)*(JPTRNC + 4) ) + PARAMETER ( JPTRP1 = (JPTRNC + 1) ) +C + REAL PPEPSA, PPQUART, PPHALF, PPTWO, PP90 + PARAMETER ( PPEPSA = 1.0E-6) + PARAMETER ( PPQUART = 0.25E0) + PARAMETER ( PPHALF = 0.5E0) + PARAMETER ( PPTWO = 2.0E0) + PARAMETER ( PP90 = 90.0E0) +C + REAL PPI + PARAMETER ( PPI = 3.14159265358979 ) +C +C Debug parameters +C + INTEGER NDBG, NDBGLP + COMMON /JDCNDBG/ NDBG, NDBGLP diff --git a/python/pythontest/TestInstallTar/test_untar/src/phgrreal.f b/python/pythontest/TestInstallTar/test_untar/src/phgrreal.f new file mode 100644 index 0000000000000000000000000000000000000000..aa3658c917fd35d15dd4c2b9959f4fcf074923a1 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/phgrreal.f @@ -0,0 +1,553 @@ + MODULE PHTOGR + + INTEGER, PARAMETER :: MAXAUF=36000 + + CONTAINS + + SUBROUTINE PHGR213(CXMN,FELD,WSAVE,IFAX,Z,MLAT,MNAUF, + *MAXL,MAXB,MLEVEL) + +C DIE ROUTINE F]HRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF DAS REDUZIERTE GAUSS'SCHE GITTER DURCH +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C WSAVE = Working Array fuer Fouriertransformation +C Z = LEGENDREFUNKTIONSWERTE +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT NONE + +C Anzahl der Gitterpunkte auf jedem Breitenkreis + INTEGER MLAT(MAXB/2) + INTEGER K,MAXL,MAXB,MLEVEL,MNAUF + INTEGER IND(MAXB) + + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB/2) + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + INTEGER :: IFAX(10,MAXB) + + IND(1)=0 + DO 7 K=2,MAXB/2 + IND(K)=IND(K-1)+MLAT(K-1) +7 CONTINUE + +!$OMP PARALLEL DO SCHEDULE(DYNAMIC) + DO 17 K=1,MAXB/2 + CALL PHSYM(K,IND,CXMN,FELD,Z,WSAVE,IFAX,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + +17 CONTINUE +!$OMP END PARALLEL DO + + RETURN + END SUBROUTINE PHGR213 +C +C + SUBROUTINE PHSYM(K,IND,CXMN,FELD,Z,WSAVE,IFAX,MLAT, + *MNAUF,MAXL,MAXB,MLEVEL) + + IMPLICIT NONE + + INTEGER MLAT(MAXB/2) + INTEGER K,L,I,J,LLS,LLPS,LL,LLP,MAXL,MAXB,MLEVEL,MNAUF + INTEGER IND(MAXB) + INTEGER :: IFAX(10,MAXB) + + +C FELD DER FOURIERKOEFFIZIENTEN + REAL :: CXMS(0:MAXAUF-1),CXMA(0:MAXAUF-1) + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL 
Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB/2) + REAL ACR,ACI,SCR,SCI + + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MLEVEL) + REAL WSAVE(8*MAXB+15,MAXB/2) + + DO 6 L=1,MLEVEL + LL=0 + LLP=0 + DO 1 I=0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + LLS=LL + LLPS=LLP + IF(2*I+1.LT.MLAT(K)) THEN +C Innerste Schleife aufgespalten um if-Abfrage zu sparen + DO 18 J=I,MNAUF,2 + SCR=SCR+Z(LLP,K)*CXMN(2*LL,L) + SCI=SCI+Z(LLP,K)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +18 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + DO 19 J=I+1,MNAUF,2 + ACR=ACR+Z(LLP,K)*CXMN(2*LL,L) + ACI=ACI+Z(LLP,K)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +19 CONTINUE + ENDIF + LL=LLS+(MNAUF-I+1) + LLP=LLPS+(MNAUF-I+3) + CXMS(2*I)=SCR+ACR + CXMS(2*I+1)=SCI+ACI + CXMA(2*I)=SCR-ACR + CXMA(2*I+1)=SCI-ACI + 1 CONTINUE +C CALL FOURTR(CXMS,FELD(IND(k)+1,L),WSAVE(:,K),MNAUF, +C *MLAT(K),1) +C CALL FOURTR(CXMA,FELD(MAXL-IND(k)-MLAT(K)+1,L), +C *WSAVE(:,K),MNAUF,MLAT(K),1) + CALL RFOURTR(CXMS,WSAVE(:,K),IFAX(:,K),MNAUF, + *MLAT(K),1) + FELD(IND(k)+1:IND(K)+MLAT(K),L)=CXMS(0:MLAT(K)-1) + CALL RFOURTR(CXMA, + *WSAVE(:,K),IFAX(:,K),MNAUF,MLAT(K),1) + FELD(MAXL-IND(k)-MLAT(K)+1:MAXL-IND(k),L)=CXMA(0:MLAT(K)-1) +C WRITE(*,*) IND+1,FELD(IND+1,L) +6 CONTINUE + + END SUBROUTINE PHSYM + + SUBROUTINE PHGCUT(CXMN,FELD,WSAVE,IFAX,Z, + * MNAUF,MMAX,MAUF,MANF,MAXL,MAXB,MLEVEL) + +C DIE ROUTINE FUEHRT EINE TRANSFORMATION EINER +C FELDVARIABLEN VOM PHASENRAUM IN DEN PHYSIKALISCHEN +C RAUM AUF KUGELKOORDINATEN DURCH. Es kann ein Teilausschnitt +C Der Erde angegeben werden. Diese Routine ist langsamer als +C phgrph +C +C CXMN = SPEKTRALKOEFFIZIENTEN IN DER REIHENFOLGE +C CX00,CX01,CX11,CX02,....CXMNAUFMNAUF +C FELD = FELD DER METEOROLOGISCHEN VARIABLEN +C BREITE = SINUS DER GEOGRAFISCHEN BREITEN +C +C MNAUF ANZAHL DER FOURIERKOEFFIZIENTEN +C MAUF ANZAHL DER LAENGEN UND DER FOURIERKOEFFIZIENTEN +C MANF ANFANG DES LAENGENBEREICHS FUER DAS GITTER, +C AUF DAS INTERPOLIERT WERDEN SOLL +C MAXL ANZAHL DER FUER DAS GITTER BENUTZTEN LAENGEN +C MAXB ANZAHL DER FUER DAS GITTER BENOETIGTEN BREITEN +C MLEVEL ANZAHL DER LEVELS, DIE TRANSFORMIERT WERDEN +C + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MMAX+3)*(MMAX+4))/2,MAXB) + + DIMENSION CXMN(0:(MMAX+1)*(MMAX+2)-1,MLEVEL) + REAL FELD(MAXL,MAXB,MLEVEL) + DIMENSION WSAVE(4*MAUF+15) + INTEGER:: IFAX(10) + + LOGICAL SYM + +C +C write(*,*)mauf,mnauf,manf,maxl + + + IF(MAUF.LE.MNAUF) WRITE(*,*) 'TOO COARSE LONGITUDE RESOLUTION' + IF((MANF.LT.1).OR.(MAXL.LT.1).OR. + * (MANF.GT.MAUF).OR.(MAXL.GT.MAUF)) THEN + WRITE(*,*) 'WRONG LONGITUDE RANGE',MANF,MAXL + STOP + ENDIF + +C Pruefe, ob Ausgabegitter symmetrisch zum Aequator ist +C Wenn ja soll Symmetrie der Legendrepolynome ausgenutzt werden + IF(MAXB .GT. 4) THEN + SYM=.TRUE. + DO 11 J=5,5 + IF(ABS(ABS(Z(100,J))-ABS(Z(100,MAXB+1-J))).GT.1E-11) + * SYM=.FALSE. +C WRITE(*,*) ABS(Z(100,J)),ABS(Z(100,MAXB+1-J)) +11 CONTINUE + WRITE(*,*) 'Symmetrisch: ',SYM + ELSE + SYM=.FALSE. 
+ ENDIF + + + IF(SYM) THEN +!$OMP PARALLEL DO + DO J=1,(MAXB+1)/2 + CALL PHSYMCUT(J,CXMN,FELD,Z,WSAVE,IFAX, + *MAUF,MNAUF,MAXL,MAXB,MLEVEL,MANF) + + ENDDO +!$OMP END PARALLEL DO + ELSE +!$OMP PARALLEL DO + DO J=1,MAXB + CALL PHGPNS(CXMN,FELD,Z,WSAVE,IFAX, + *J,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + ENDDO +!$OMP END PARALLEL DO + + ENDIF + + + RETURN + END SUBROUTINE PHGCUT + + SUBROUTINE PHSYMCUT(J,CXMN,FELD,Z,WSAVE,IFAX, + *MAUF,MNAUF,MAXL,MAXB,MLEVEL,MANF) + + IMPLICIT REAL (A-H,O-Z) + +C FELD DER FOURIERKOEFFIZIENTEN + + REAL :: CXM(0:MAXAUF-1),CXMA(0:MAXAUF-1) + + +C FELD DER LEGENDREPOLYNOME FUER EINE BREITE + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB) + REAL SCR,SCI,ACR,ACI + + DIMENSION CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + REAL FELD(MAXL,MAXB,MLEVEL) + DIMENSION WSAVE(4*MAUF+15) + INTEGER :: IFAX(10) + + DO 16 L=1,MLEVEL + LL=0 + LLP=0 + DO 17 I=0,MNAUF + SCR=0.D0 + SCI=0.D0 + ACR=0.D0 + ACI=0.D0 + LLS=LL + LLPS=LLP +C Innerste Schleife aufgespalten um if-Abfrage zu sparen + DO 18 K=I,MNAUF,2 + SCR=SCR+Z(LLP,J)*CXMN(2*LL,L) + SCI=SCI+Z(LLP,J)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +18 CONTINUE + LL=LLS+1 + LLP=LLPS+1 + DO 19 K=I+1,MNAUF,2 + ACR=ACR+Z(LLP,J)*CXMN(2*LL,L) + ACI=ACI +Z(LLP,J)*CXMN(2*LL+1,L) + LL=LL+2 + LLP=LLP+2 +19 CONTINUE + LL=LLS+MNAUF-I+1 + LLP=LLPS+MNAUF-I+3 + CXM(2*I)=SCR+ACR + CXM(2*I+1)=SCI+ACI + CXMA(2*I)=SCR-ACR + CXMA(2*I+1)=SCI-ACI +17 CONTINUE + + CALL RFOURTR(CXM,WSAVE,IFAX,MNAUF,MAUF,1) + DO 26 I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,J,L)=CXM(MANF+I-1) + ELSE + FELD(I+1,J,L)=CXM(MANF-MAUF+I-1) + ENDIF +26 CONTINUE + CALL RFOURTR(CXMA,WSAVE,IFAX,MNAUF,MAUF,1) + DO 36 I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,MAXB+1-J,L)=CXMA(MANF+I-1) + ELSE + FELD(I+1,MAXB+1-J,L)=CXMA(MANF-MAUF+I-1) + ENDIF +36 CONTINUE +16 CONTINUE + + END SUBROUTINE PHSYMCUT + + SUBROUTINE PHGPNS(CXMN,FELD,Z,WSAVE,IFAX, + *J,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + IMPLICIT NONE + INTEGER,intent(in) :: MNAUF,MAUF,MANF,J,MAXL,MAXB,MLEVEL + REAL :: CXM(0:MAXAUF-1) + REAL,intent(in) :: Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB) + + REAL,intent(in) :: CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) + + REAL,intent(in) :: WSAVE(4*MAUF+15) + + REAL :: FELD(MAXL,MAXB,MLEVEL) + INTEGER :: IFAX(10) + + INTEGER I,L + + DO L=1,MLEVEL + CALL LEGTR(CXMN(:,L),CXM,Z(:,J),MNAUF,MAUF) + CALL RFOURTR(CXM,WSAVE,IFAX,MNAUF,MAUF,1) + + DO I=0,MAXL-1 + IF(MANF+I.LE.MAUF) THEN + FELD(I+1,J,L)=CXM(MANF+I-1) + ELSE + FELD(I+1,J,L)=CXM(MANF-MAUF+I-1) + ENDIF + ENDDO + ENDDO + END SUBROUTINE PHGPNS +C + SUBROUTINE LEGTR(CXMN,CXM,Z,MNAUF,MAUF) + IMPLICIT NONE + INTEGER MNAUF,MAUF,LL,LLP,I,J + REAL CXM(0:MAXAUF-1) + REAL CXMN(0:(MNAUF+1)*(MNAUF+2)-1) + REAL Z(0:((MNAUF+3)*(MNAUF+4))/2) + REAL CI,CR +C +C DIESE ROUTINE BERECHNET DIE FOURIERKOEFFIZIENTEN CXM +C + LL=0 + LLP=0 + DO 1 I=0,MNAUF + CR=0.D0 + CI=0.D0 + DO 2 J=I,MNAUF + CR=CR+Z(LLP)*CXMN(2*LL) + CI=CI+Z(LLP)*CXMN(2*LL+1) + LL=LL+1 + LLP=LLP+1 + 2 CONTINUE + LLP=LLP+2 + CXM(2*I)=CR + CXM(2*I+1)=CI + 1 CONTINUE + RETURN + END SUBROUTINE LEGTR +C +C +C + SUBROUTINE RFOURTR(CXM,TRIGS,IFAX,MNAUF,MAXL,ISIGN) +C BERECHNET DIE FOURIERSUMME MIT EINEM FFT-ALGORITHMUS + IMPLICIT REAL (A-H,O-Z) + DIMENSION CXM(0:MAXAUF-1) + REAL :: WSAVE(2*MAXL),TRIGS(2*MAXL) + INTEGER IFAX(10) + + DO I=MNAUF+1,MAXL-1 + CXM(2*I)=0.0 + CXM(2*I+1)=0.0 + ENDDO + CALL FFT99(CXM,WSAVE,TRIGS,IFAX,1,1,MAXL,1,1) + DO I=0,MAXL-1 + CXM(I)=CXM(I+1) + ENDDO + + RETURN + END SUBROUTINE RFOURTR +C +C + SUBROUTINE GAULEG(X1,X2,X,W,N) +C BERECHNET DIE GAUSS+SCHEN BREITEN + IMPLICIT REAL (A-H,O-Z) + DIMENSION 
X(N),W(N) + PARAMETER (EPS=3.D-14) + M=(N+1)/2 + XM=0.5D0*(X2+X1) + XL=0.5D0*(X2-X1) + DO 12 I=1,M + Z=DCOS(3.141592654D0*(I-.25D0)/(N+.5D0)) +1 CONTINUE + P1=1.D0 + P2=0.D0 + DO 11 J=1,N + P3=P2 + P2=P1 + P1=((2.D0*J-1.D0)*Z*P2-(J-1.D0)*P3)/J +11 CONTINUE + PP=N*(Z*P1-P2)/(Z*Z-1.D0) + Z1=Z + Z=Z1-P1/PP + IF(ABS(Z-Z1).GT.EPS)GO TO 1 + X(I)=XM-XL*Z + X(N+1-I)=XM+XL*Z + W(I)=2.D0*XL/((1.D0-Z*Z)*PP*PP) + W(N+1-I)=W(I) +12 CONTINUE + RETURN + END SUBROUTINE GAULEG +C +C + SUBROUTINE PLGNFA(LL,X,Z) +C +C PLGNDN BERECHNET ALLE NORMIERTEN ASSOZIIERTEN +C LEGENDREFUNKTIONEN VON P00(X) BIS PLL(X) +C UND SCHREIBT SIE IN DAS FELD Z +C Die Polynome sind wie im ECMWF indiziert, d.h. +C P00,P10,P11,P20,P21,P22,... +C Ansonsten ist die Routine analog zu PLGNDN +C X IST DER COSINUS DES ZENITWINKELS ODER +C DER SINUS DER GEOGRAFISCHEN BREITE +C + IMPLICIT REAL (A-H,O-Z) + DIMENSION Z(0:((LL+3)*(LL+4))/2) +C + L=LL+2 + I=1 + Z(0)=1.D0 + FACT=1.D0 + POT=1.D0 + SOMX2=DSQRT(1.D0-X*X) + DO 14 J=0,L + DJ=DBLE(J) + IF(J.GT.0) THEN + FACT=FACT*(2.D0*DJ-1.D0)/(2.D0*DJ) + POT=POT*SOMX2 + Z(I)=DSQRT((2.D0*DJ+1.D0)*FACT)*POT + I=I+1 + ENDIF + IF(J.LT.L) THEN + Z(I)=X* + *DSQRT((4.D0*DJ*DJ+8.D0*DJ+3.D0)/(2.D0*DJ+1.D0))*Z(I-1) + I=I+1 + ENDIF + DK=DJ+2.D0 + DO 14 K=J+2,L + DDK=(DK*DK-DJ*DJ) + Z(I)=X*DSQRT((4.D0*DK*DK-1.D0)/DDK)*Z(I-1)- + * DSQRT(((2.D0*DK+1.D0)*(DK-DJ-1.D0)*(DK+DJ-1.D0))/ + * ((2.D0*DK-3.D0)*DDK))*Z(I-2) + DK=DK+1.D0 + I=I+1 +14 CONTINUE + RETURN + END SUBROUTINE PLGNFA + + + SUBROUTINE DPLGND(MNAUF,Z,DZ) +C +C DPLGND BERECHNET DIE ABLEITUNG DER NORMIERTEN ASSOZIIERTEN +C LEGENDREFUNKTIONEN VON P00(X) BIS PLL(X) +C UND SCHREIBT SIE IN DAS FELD DZ +C DIE REIHENFOLGE IST +C P00(X),P01(X),P11(X),P02(X),P12(X),P22(X),..PLL(X) +C + IMPLICIT REAL (A-H,O-Z) + DIMENSION Z(0:((MNAUF+3)*(MNAUF+4))/2) + DIMENSION DZ(0:((MNAUF+2)*(MNAUF+3))/2) +C + IF(Z(0).NE.1.D0) THEN + WRITE(*,*) 'DPLGND: Z(0) must be 1.0' + STOP + ENDIF + + LLP=0 + LLH=0 + DO 1 I=0,MNAUF+1 + DO 2 J=I,MNAUF+1 + IF(I.EQ.J) THEN + WURZELA= + *DSQRT(DBLE((J+1)*(J+1)-I*I)/DBLE(4*(J+1)*(J+1)-1)) + DZ(LLH)=DBLE(J)*WURZELA*Z(LLP+1) + ELSE + WURZELB= + *DSQRT(DBLE((J+1)*(J+1)-I*I)/DBLE(4*(J+1)*(J+1)-1)) + DZ(LLH)= + *DBLE(J)*WURZELB*Z(LLP+1)-DBLE(J+1)*WURZELA*Z(LLP-1) + WURZELA=WURZELB + ENDIF + LLH=LLH+1 + LLP=LLP+1 +2 CONTINUE + LLP=LLP+1 +1 CONTINUE + RETURN + END SUBROUTINE DPLGND + + +* Spectral Filter of Sardeshmukh and Hoskins (1984, MWR) +* MM=Spectral truncation of field +* MMAX= Spectral truncation of filter +* + SUBROUTINE SPFILTER(FELDMN,MM,MMAX) + + IMPLICIT NONE + + INTEGER MM,MMAX,I,J,K,L + REAL FELDMN(0:(MM+1)*(MM+2)-1) + REAL KMAX,SMAX,FAK + + SMAX=0.1 + KMAX=-ALOG(SMAX) + KMAX=KMAX/(float(MMAX)*float(MMAX+1))**2 +c WRITE(*,*)'alogsmax',alog(smax),'KMAX:',KMAX + l=0 + do i=0,MM + do j=i,MM +c write(*,*) i,j,feld(k),feld(k)*exp(-KMAX*(j*(j+1))**2) + if(j .le. MMAX) then +c fak=exp(-KMAX*(j*(j+1))**2) + fak=1.0 + feldmn(2*l)=feldmn(2*l)*fak + feldmn(2*l+1)=feldmn(2*l+1)*fak + else + feldmn(2*l)=0. + feldmn(2*l+1)=0. 
+ endif + l=l+1 + enddo + enddo + END SUBROUTINE SPFILTER + + END MODULE PHTOGR + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/pythontest/TestInstallTar/test_untar/src/posnam.f b/python/pythontest/TestInstallTar/test_untar/src/posnam.f new file mode 100644 index 0000000000000000000000000000000000000000..c5d12d2b9928e581f67ef0c6388dd3e641693aed --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/posnam.f @@ -0,0 +1,25 @@ + SUBROUTINE POSNAM(KULNAM,CDNAML) +!------------------------------------- + +!--- position in namelist file. +! author: Mats Hamrud, ECMWF + + INTEGER, INTENT(IN) :: KULNAM + CHARACTER*(*), INTENT(IN) :: CDNAML + CHARACTER*120 CLINE + CHARACTER*1 CLTEST + REWIND(KULNAM) + ILEN=LEN(CDNAML) + 102 CONTINUE + CLINE=' ' + READ(KULNAM,'(A)') CLINE + IND1=INDEX(CLINE,'&'//CDNAML) + IF(IND1.EQ.0) GO TO 102 + CLTEST=CLINE(IND1+ILEN+1:IND1+ILEN+1) + IF((LGE(CLTEST,'0').AND.LLE(CLTEST,'9')).OR. + & (LGE(CLTEST,'A').AND.LLE(CLTEST,'Z'))) GO TO 102 + BACKSPACE(KULNAM) + + RETURN + END SUBROUTINE POSNAM + diff --git a/python/pythontest/TestInstallTar/test_untar/src/preconvert.f90 b/python/pythontest/TestInstallTar/test_untar/src/preconvert.f90 new file mode 100644 index 0000000000000000000000000000000000000000..c28610f2aecb31a3f4c3860d7a482ae0fa85d23f --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/preconvert.f90 @@ -0,0 +1,807 @@ + PROGRAM PRECONVERT +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! PROGRAM PRECONVERT - PREPARES INPUT DATA FOR POP MODEL METEOR- ! +! OLOGICAL PREPROCESSOR ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! CALCULATION OF ETAPOINT ON A REGULAR LAMDA/PHI GRID AND WRITING ! +! U,V,ETAPOINT,T,PS,Q,SD,MSL,TCC,10U, 10V, 2T,2D,LSP,CP,SSHF,SSR, ! +! EWSS,NSSS TO AN OUTPUT FILE (GRIB 1 or 2 FORMAT). ! +! ! +! AUTHORS: L. HAIMBERGER, G. WOTAWA, 1994-04 ! +! adapted: A. BECK ! +! 2003-05-11 ! +! L. Haimberger 2006-12 V2.0 ! +! modified to handle arbitrary regular grids ! +! and T799 resolution data ! +! L. Haimberger 2010-03 V4.0 ! +! modified to grib edition 2 fields ! +! and T1279 resolution data ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! DESCRIPTION OF NEEDED INPUT: ! +! ! +! UNIT FILE PARAMETER(S) DATA REPRESENTATION ! +! ! +! 11 fort.11 T,U,V regular lamda phi grid ! +! 12 fort.12 D regular lamda phi grid ! +! 13 fort.13 LNSP fort.13 spherical harmonics ! +! 14 fort.14 SD,MSL,TCC,10U, ! +! 10V,2T,2D regular lamda phi grid ! +! 16 fort.16 LSP,CP,SSHF, ! +! SSR,EWSS,NSSS regular lamda phi grid ! +! 17 fort.17 Q regular lamda phi grid ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ! +! DESCRIPTION OF OUTPUT: ! +! ! +! UNIT FILE PARAMETER(S) DATA REPRESENTATION ! +! ! +! 15 fort.15 U,V,ETA,T,PS, ! +! Q,SD,MSL,TCC, ! +! 10U,10V,2T,2D, regular lamda phi grid ! +! LSP,CP,SSHF, ! +! SSR,EWSS,NSSS ! +! ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! 
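+!
+! The run-time configuration is read as a NAMGEN namelist from unit 4
+! (see the POSNAM call and the NAMGEN declaration below). A possible
+! fort.4 input is sketched here; the values are illustrative only and
+! have to be consistent with the retrieved GRIB fields:
+!
+!   &NAMGEN
+!     MAXL=72, MAXB=37, MLEVEL=60, MLEVELIST='1/to/60',
+!     MNAUF=63, METAPAR=77,
+!     RLO0=-175.0, RLO1=180.0, RLA0=-90.0, RLA1=90.0,
+!     MOMEGA=0, MOMEGADIFF=0, MGAUSS=1, MSMOOTH=0,
+!     META=0, METADIFF=0, MDPDETA=1
+!   /
+!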
+ + USE PHTOGR + USE GRTOPH + USE FTRAFO + USE RWGRIB2 + USE GRIB_API + + IMPLICIT NONE + + REAL, ALLOCATABLE, DIMENSION (:,:) :: LNPS + REAL, ALLOCATABLE, DIMENSION (:,:) :: Z + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: T, UV , UV2 + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: QA,OM,OMR + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: DIV, ETA,ETAR + REAL, ALLOCATABLE, DIMENSION (:,:) :: DPSDL, DPSDM + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: PS,DPSDT + REAL, ALLOCATABLE, DIMENSION (:,:,:) :: SURF,FLUX,OROLSM + REAL, ALLOCATABLE, DIMENSION (:) :: WSAVE,H,SINL,COSL,WSAVE2 + REAL, ALLOCATABLE, DIMENSION (:) :: BREITE, GBREITE,AK, BK,pv + +! Arrays for Gaussian grid calculations + + REAL :: X1,X2,RMS,MW,SIG,LAM + REAL,ALLOCATABLE :: CUA(:,:,:),CVA(:,:,:) + + REAL, ALLOCATABLE, DIMENSION (:,:) :: P,PP !,P2 + REAL, ALLOCATABLE, DIMENSION (:,:) :: XMN,HILFUV + REAL, ALLOCATABLE, DIMENSION (:) :: LNPMN,LNPMN2,LNPMN3 + REAL, ALLOCATABLE, DIMENSION (:) :: WEIGHT + REAL, ALLOCATABLE, DIMENSION (:,:) :: UGVG + REAL, ALLOCATABLE, DIMENSION (:,:) :: DG, ETAG + REAL, ALLOCATABLE, DIMENSION (:,:) :: GWSAVE + REAL, ALLOCATABLE, DIMENSION (:) :: PSG,HILF + +! end arrays for Gaussian grid calculations + + INTEGER, ALLOCATABLE, DIMENSION (:) :: MLAT,MPSURF,MPFLUX,MPORO,MPAR + INTEGER, ALLOCATABLE :: GIFAX(:,:) + + REAL PI,COSB,DAK,DBK,P00 + REAL URLAR8,JMIN1,LLLAR8,MAXBMIN1,PIR8,DCOSB + + INTEGER I,J,K,L,IERR,M,LTEST,MK,NGI,NGJ + INTEGER MFLUX,MSURF,MORO + INTEGER LUNIT,LUNIT2 + + INTEGER MAXL, MAXB, MLEVEL, LEVOUT,LEVMIN,LEVMAX + INTEGER MOMEGA,MOMEGADIFF,MGAUSS,MSMOOTH, MNAUF,META,METADIFF + INTEGER MDPDETA,METAPAR + REAL RLO0, RLO1, RLA0, RLA1 + CHARACTER*300 MLEVELIST + + INTEGER MAUF, MANF,IFAX(10) + + INTEGER IGRIB(1),iret,ogrib + + CHARACTER*80 FILENAME + + NAMELIST /NAMGEN/ & + MAXL, MAXB, & + MLEVEL,MLEVELIST,MNAUF,METAPAR, & + RLO0, RLO1, RLA0, RLA1, & + MOMEGA,MOMEGADIFF,MGAUSS,MSMOOTH,META,METADIFF,& + MDPDETA + + LTEST=1 + + call posnam (4,'NAMGEN') + read (4,NAMGEN) + + MAUF=INT(360.*(REAL(MAXL)-1.)/(RLO1-RLO0)+0.0001) +! PRINT*, MAUF + + MANF=INT(REAL(MAUF)/360.*(360.+RLO0)+1.0001) + IF(MANF .gt. MAUF) MANF=MANF-MAUF + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! ALLOCATE VARIABLES ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + ALLOCATE (LNPS(0:(MNAUF+1)*(MNAUF+2)-1,1)) + + ALLOCATE (H(0:(MNAUF+2)*(MNAUF+3)/2)) + + + ALLOCATE (OM(MAXL, MAXB, MLEVEL)) + + ALLOCATE (ETA(MAXL,MAXB,MLEVEL)) + + ALLOCATE (PS(MAXL, MAXB,1),DPSDT(MAXL, MAXB,1)) + + + ALLOCATE (WSAVE(4*MAUF+15),WSAVE2(4*MAUF+15)) + + ALLOCATE (BREITE(MAXB),AK(MLEVEL+1),BK(MLEVEL+1),pv(2*mlevel+2)) + + ALLOCATE (MPAR(2)) + + ALLOCATE (COSL(MAXL),SINL(MAXL)) + + ALLOCATE (CUA(2,4,MLEVEL),CVA(2,4,MLEVEL)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! GAUSS STUFF ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + + IF(MGAUSS .EQ. 1) THEN + LUNIT=0 + FILENAME='fort.18' + + call grib_open_file(LUNIT, TRIM(FILENAME),'r') + + call grib_new_from_file(LUNIT,igrib(1), iret) + +! we can close the file + call grib_close_file(LUNIT) + +! call grib_get(igrib(1),'gridType', j) + + NGJ=MNAUF+1 + + ALLOCATE (GWSAVE(8*NGJ+15,NGJ/2)) + ALLOCATE(GIFAX(10,NGJ)) + ALLOCATE (GBREITE(NGJ),WEIGHT(NGJ)) + ALLOCATE (MLAT(NGJ)) + ALLOCATE (P(0:((MNAUF+3)*(MNAUF+4))/2,NGJ/2)) + ALLOCATE (PP(NGJ/2,0:((MNAUF+3)*(MNAUF+4))/2)) + ALLOCATE (Z(0:((MNAUF+3)*(MNAUF+4))/2,MAXB)) + + call grib_get(igrib(1),'numberOfPointsAlongAMeridian', NGJ) + + ! 
get as a integer + call grib_get(igrib(1),'pl', MLAT) + + NGI=SUM(MLAT) + + call grib_get(igrib(1),'numberOfVerticalCoordinateValues',mk) + + IF(mk/2-1 .ne. MLEVEL) THEN + WRITE(*,*) 'FATAL: Number of model levels',mk, & + ' does not agree with', MLEVEL,' in namelist' + STOP + ENDIF + call grib_get(igrib(1),'pv',pv) + AK=pv(1:1+MLEVEL) + BK=pv(2+MLEVEL:2*MLEVEL+2) + + ALLOCATE (LNPMN(0:(MNAUF+1)*(MNAUF+2)-1)) + ALLOCATE (LNPMN2(0:(MNAUF+1)*(MNAUF+2)-1)) + ALLOCATE (UGVG(NGI, 2*MLEVEL),HILFUV(2*MAXL,2)) + + + ALLOCATE (DPSDL(NGI,1),DPSDM(NGI,1)) + + ALLOCATE (PSG(NGI),HILF(NGI)) + ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL)) +! ALLOCATE (UV2(MAXL, MAXB, 2*MLEVEL)) + + ALLOCATE (XMN(0:(MNAUF+1)*(MNAUF+2)-1, 2*MLEVEL)) + ALLOCATE (DG(NGI,MLEVEL),ETAG(NGI,MLEVEL)) + +! Initialisieren Legendretransformation +! auf das LaT/LON Gitter + + PI=ACOS(-1.D0) +!$OMP PARALLEL DO + DO 20 J=1,MAXB + + BREITE(J)=SIN((RLA1-(J-1.D0)*(RLA1-RLA0)/(MAXB-1))* PI/180.D0) + + CALL PLGNFA(MNAUF,BREITE(J),Z(0,J)) + +20 CONTINUE +!$OMP END PARALLEL DO + +! Avoid possible Pole problem +! IF(RLA0 .EQ. -90.0) BREITE(MAXB)=sin(-89.99*PI/180.d0) +! IF(RLA1 .EQ. 90.0) BREITE(1)=sin(89.99*PI/180.d0) + +! Initialisation of fields for FFT and Legendre transformation +! to Gaussian grid and back to phase space + X1=-1.D0 + X2=1.D0 + CALL GAULEG(X1,X2,GBREITE,WEIGHT,NGJ) + +!$OMP PARALLEL DO PRIVATE(M) + DO J=1,NGJ/2 + CALL PLGNFA(MNAUF,GBREITE(J),P(:,J)) + DO M=0,(MNAUF+3)*(MNAUF+4)/2 + PP(J,M)=P(M,J) + ENDDO + ENDDO +!$OMP END PARALLEL DO + + +! MPAR(1)=152 + FILENAME='fort.12' + CALL READSPECTRAL(FILENAME,LNPMN,MNAUF,1,MLEVEL,(/152/),AK,BK) +! goto 111 + CALL SET99(WSAVE,IFAX,mauf) + CALL PHGCUT(LNPMN,PS,WSAVE,IFAX,Z, & + MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) + CALL STATIS(MAXL,MAXB,1,EXP(PS),RMS,MW,SIG) + WRITE(*,'(A12,3F12.4)') 'STATISTICS: ',RMS,MW,SIG + + DO J=1,NGJ/2 + CALL SET99(GWSAVE(1,J),GIFAX(1,J),MLAT(J)) + ENDDO + CALL PHGR213(LNPMN,HILF,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,1) + PSG=HILF + CALL GRPH213(LNPMN2,PSG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, & + MNAUF,NGI,NGJ,1) + CALL PHGR213(LNPMN2,HILF,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,1) + + + HILF=exp(PSG)-exp(HILF) + + CALL STATIS(NGI,1,1,HILF,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + PSG=EXP(PSG) + HILF=PSG + CALL STATIS(NGI,1,1,HILF,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + 111 FILENAME='fort.10' + CALL READSPECTRAL(FILENAME, & + XMN,MNAUF,2*MLEVEL,MLEVEL,(/131,132/),AK,BK) +! Transformieren des Windes auf das Gaussgitter + CALL PHGR213(XMN,UGVG,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,2*MLEVEL) + DO K=1,MLEVEL +! North Pole + CALL JSPPOLE(XMN(:,K),1,MNAUF,.TRUE.,CUA(:,:,K)) + CALL JSPPOLE(XMN(:,MLEVEL+K),1,MNAUF,.TRUE.,CVA(:,:,K)) +! South Pole + CALL JSPPOLE(XMN(:,K),-1,MNAUF,.TRUE.,CUA(:,3:4,K)) + CALL JSPPOLE(XMN(:,MLEVEL+K),-1,MNAUF,.TRUE.,CVA(:,3:4,K)) + ENDDO + + DO K=1,2*MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,UV,WSAVE,IFAX,Z, & + MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,2*MLEVEL) + + + 112 FILENAME='fort.13' + CALL READSPECTRAL(FILENAME,XMN,MNAUF,MLEVEL,MLEVEL,(/155/),AK,BK) +! Transformieren der horizontalen Divergenz auf das Gaussgitter + CALL PHGR213(XMN,DG,GWSAVE,GIFAX,P,MLAT,MNAUF,NGI,NGJ,MLEVEL) + + +! Berechnung des Gradienten des Logarithmus des Bodendrucks +! auf dem Gaussgitter + CALL PHGRAD(LNPMN,DPSDL,DPSDM,GWSAVE,GIFAX,P,H,MLAT,MNAUF,NGI,NGJ,1) + +! 
Berechnung der Vertikalgeschwindigkeit auf dem Gaussgitter + CALL CONTGL(HILF,DPSDL,DPSDM,DG,UGVG(:,1),UGVG(:,MLEVEL+1), & + GBREITE,ETAG,MLAT,AK,BK,NGI,NGJ,MLEVEL) + + + CALL GRPH213(XMN,ETAG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, & + MNAUF,NGI,NGJ,MLEVEL) + DO K=1,MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,ETA,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + CALL GRPH213(XMN,HILF,GWSAVE,GIFAX,PP,WEIGHT,MLAT, MNAUF,NGI,NGJ,1) + + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,1),MNAUF,MSMOOTH) + CALL PHGCUT(XMN,DPSDT,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) +! GOTO 114 + + CALL STATIS(MAXL,MAXB,1,DPSDT,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS DPSDT: ',RMS,MW,SIG + + IF(MOMEGADIFF .ne. 0) THEN +! Berechnung von Omega auf dem Gaussgitter + CALL OMEGA(PSG,DPSDL,DPSDM,DG,UGVG(:,1),UGVG(:,MLEVEL+1), & + GBREITE,ETAG,MLAT,AK,BK,NGI ,NGJ,MLEVEL) + + CALL GRPH213(XMN,ETAG,GWSAVE,GIFAX,PP,WEIGHT,MLAT,& + MNAUF,NGI,NGJ,MLEVEL) + DO K=1,MLEVEL + IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) + ENDDO + CALL PHGCUT(XMN,OM,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,MLEVEL) + + ENDIF !MOMEGA + + CALL GRPH213(XMN,PSG,GWSAVE,GIFAX,PP,WEIGHT,MLAT,MNAUF,NGI,NGJ,1) + CALL PHGCUT(XMN,PS,WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,1) + + CALL STATIS(MAXL,MAXB,1,PS,RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + 114 DEALLOCATE(HILF,PSG,DPSDL,DPSDM,ETAG,DG,LNPMN) + +! ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL)) +! CALL GRPH213(XMN,UGVG,GWSAVE,GIFAX,PP,WEIGHT,MLAT, +! *MNAUF,NGI,NGJ,2*MLEVEL) +! DO K=1,2*MLEVEL +! IF(MSMOOTH .ne. 0) CALL SPFILTER(XMN(:,K),MNAUF,MSMOOTH) +! ENDDO +! CALL PHGCUT(XMN,UV,WSAVE,IFAX,Z, +! *MNAUF,MNAUF,MAUF,MANF,MAXL,MAXB,2*MLEVEL) + DEALLOCATE(PP,P,UGVG,MLAT,GBREITE,WEIGHT,GWSAVE,XMN) + +! CALL ETAGAUSS(Z,WSAVE +! *,BREITE,UV,ETA,OM,PS, +! *MAUF,MAXB,MAXL,MANF,MNAUF,MLEVEL,MSMOOTH) + + ELSE + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF PREPARED METEOROLOGICAL FIELDS ! +! ! +! THE FOLLOWING FIELDS ARE EXPECTED: ! +! ! +! UNIT 11: T,U,V (REGULAR GRID) ! +! UNIT 17: Q (REGULAR GRID) ! +! UNIT 13: D (REGULAR GRID) ! +! UNIT 12: LNSP (SPHERICAL HARMONICS) ! +! UNIT 14: SURFACE DATA (REGULAR GRID) ! +! UNIT 16: FLUX DATA (REGULAR GRID) ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! + ALLOCATE (MLAT(MAXB)) + MLAT=MAXL + ALLOCATE (Z(0:((MNAUF+3)*(MNAUF+4))/2,1)) + ALLOCATE (DPSDL(MAXL,MAXB),DPSDM(MAXL,MAXB)) + ALLOCATE (UV(MAXL, MAXB, 2*MLEVEL),DIV(MAXL,MAXB,MLEVEL)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF SURFACE PRESSURE ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + FILENAME='fort.12' + CALL READSPECTRAL(FILENAME,LNPS,MNAUF,1,MLEVEL,(/152/),AK,BK) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF U,V ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! +! OPENING OF UNBLOCKED GRIB FILE +! + FILENAME='fort.10' + CALL READLATLON(FILENAME,UV,MAXL,MAXB,2*MLEVEL,(/131,132/)) + + + PI=ACOS(-1.D0) + DO J=1,MAXB + + BREITE(J)=SIN((RLA1-(J-1.D0)*(RLA1-RLA0)/(MAXB-1))*PI/180.D0) + + ENDDO +! Avoid possible Pole problem +! IF(RLA0 .EQ. -90.0) BREITE(MAXB)=sin(-89.99*PI/180.d0) +! IF(RLA1 .EQ. 90.0) BREITE(1)=sin(89.99*PI/180.d0) + + DO K=1,2*MLEVEL + DO J=1,MAXB + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + IF(RLA0 .EQ. -90.0 .AND. J .EQ. MAXB .OR. & + RLA1 .EQ. 90.0 .AND. J .EQ. 
1) then + UV(:,J,K)=UV(:,J,K)/1.D6 + else + UV(:,J,K)=UV(:,J,K)*COSB + endif + ENDDO + ENDDO + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF LNSP on grid ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! For debugging only +! FILENAME='LNSPG_G.20060330.600' +! INQUIRE(FILE=FILENAME,EXIST=EX) +! CALL READLATLON(FILENAME,QA, +! *MAXL,MAXB,1,1,(/152/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF DIVERGENCE ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(META .EQ. 0 .OR. METADIFF .EQ. 1) THEN + FILENAME='fort.13' + CALL READLATLON(FILENAME,DIV,MAXL,MAXB,MLEVEL,(/155/)) + ENDIF + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! CALCULATION OF ETAPOINT --> TOTAL TIME DERIVATIVE OF ! +! ECMWF VERTICAL COORDINATE ETA MULTIPLIED BY DERIVATIVE ! +! OF PRESSURE IN ETA DIRECTION ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! Initialisieren Legendretransformation +! auf das LaT/LON Gitter +! Without Gaussian grid calculation Legendre Polynomials are calculated +! only for one latitude to save space + + DO J=1,MAXB + + CALL PLGNFA(MNAUF,BREITE(J),Z(0,1)) + + CALL PHGCUT(LNPS,PS(:,J,1),WSAVE,IFAX,Z,MNAUF,MNAUF,MAUF,MANF,MAXL,1,1) + + + IF(META .EQ. 0 .or. METADIFF .EQ. 1 ) THEN + CALL PHGRACUT(LNPS,DPSDL(:,J),DPSDM(:,J),WSAVE,IFAX,Z,H,MAUF, & + MNAUF,MAXL,1,MANF,1) + ENDIF + ENDDO + + PS=EXP(PS) + +! For debugging only + CALL STATIS(MAXL,MAXB,1,PS(:,:,1),RMS,MW,SIG) + WRITE(*,'(A12,3F11.4)') 'STATISTICS: ',RMS,MW,SIG + + + IF(MOMEGADIFF .ne. 0) THEN + + CALL OMEGA(PS,DPSDL,DPSDM,DIV,UV(:,:,1),UV(:,:,MLEVEL+1), & + BREITE,OM,MLAT,AK,BK,MAXL*MAXB,MAXB,MLEVEL) + ENDIF + + IF(META .EQ. 0 .OR. METADIFF .ne. 0) THEN + DPSDT=PS + CALL CONTGL(DPSDT,DPSDL,DPSDM,DIV,UV(:,:,1),UV(:,:,MLEVEL+1), & + BREITE,ETA,MLAT,AK,BK,MAXL*MAXB,MAXB,MLEVEL) + ENDIF + + ENDIF ! MGAUSS + +! CREATE FILE VERTICAL.EC NEEDED BY POP MODEL + + open(21,file='VERTICAL.EC') + write(21,'(a)') + write(21,'(a)') 'VERTICAL DISCRETIZATION OF POP MODEL' + write(21,'(a)') + write(21,'(i3,a)') MLEVEL,' number of layers' + write(21,'(a)') + write(21,'(a)') '* A(NLEV+1)' + write(21,'(a)') + do 205 i=1,MLEVEL+1 +205 write(21,'(f18.12)') AK(I) + write(21,'(a)') + write(21,'(a)') '* B(NLEV+1)' + write(21,'(a)') + do 210 i=1,MLEVEL+1 +210 write(21,'(f18.12)') BK(I) + close(21) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF OMEGA ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(MOMEGA .NE. 0 ) THEN + + + + ALLOCATE (OMR(MAXL, MAXB, MLEVEL)) + + FILENAME='fort.19' + CALL READLATLON(FILENAME,OMR,MAXL,MAXB,MLEVEL,(/135/)) + + IF(MOMEGADIFF .NE. 0 ) THEN + + DO K=1,MLEVEL + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,OMR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' OMEGA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,OM(:,:,K)-OMR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') 'OMEGA DIFF: ',K,RMS,MW,SIG + ENDDO + + ENDIF + ENDIF + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF ETA ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + IF(META .NE. 0 ) THEN + + ALLOCATE (ETAR(MAXL, MAXB, MLEVEL)) + + P00=101325. + FILENAME='fort.21' + CALL READLATLON(FILENAME,ETAR,MAXL,MAXB,MLEVEL,(/77/)) + + if(MDPDETA .EQ. 
1) THEN + DO K=1,MLEVEL + DAK=AK(K+1)-AK(K) + DBK=BK(K+1)-BK(K) + DO J=1,MAXB + DO I=1,MAXL + ETAR(I,J,K)=2*ETAR(I,J,K)*PS(I,J,1)*(DAK/PS(I,J,1)+DBK)/ & + (DAK/P00+DBK) + IF(K .GT. 1) ETAR(I,J,K)=ETAR(I,J,K)-ETAR(I,J,K-1) + ENDDO + ENDDO + ENDDO + ENDIF + + IF(METADIFF .NE. 0 ) THEN + + DO K=1,MLEVEL + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETA: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,ETAR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') ' ETAR: ',K,RMS,MW,SIG + CALL STATIS(MAXL,MAXB,1,ETA(:,:,K)-ETAR(:,:,K),RMS,MW,SIG) + WRITE(*,'(A12,I3,3F11.4)') 'ETA DIFF: ',K,RMS,MW,SIG + ENDDO + DO K=1,MLEVEL + WRITE(*,'(I3,2F11.4)') K,ETA(1,MAXB/2,K),ETAR(1,MAXB/2,K) + ENDDO + ELSE + ETA=ETAR + ENDIF + ENDIF + + ALLOCATE (T(MAXL, MAXB, MLEVEL)) + ALLOCATE (QA(MAXL, MAXB, MLEVEL)) +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF T ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! +! OPENING OF UNBLOCKED GRIB FILE +! + FILENAME='fort.11' + CALL READLATLON(FILENAME,T,MAXL,MAXB,MLEVEL,(/130/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! READING OF SPECIFIC HUMIDITY ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + + FILENAME='fort.17' + CALL READLATLON(FILENAME,QA,MAXL,MAXB,MLEVEL,(/133/)) + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! TEST READING OF UV from MARS (debug only) ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! FILENAME='fort.22' +! CALL READLATLON(FILENAME,UV2,MAXL,MAXB,2*MLEVEL,2,(/131,132/)) + + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! +! WRITE MODEL LEVEL DATA TO fort.15 ! +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + +! Calculation of etadot in CONTGL needed scaled winds (ucosphi,vcosphi) +! Now we are transforming back to the usual winds. + DO K=1,MLEVEL + DO J=2,MAXB-1 + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + UV(:,J,K)=UV(:,J,K)/COSB + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)/COSB + ENDDO +! special treatment for poles, if necessary. + DO J=1,MAXB,MAXB-1 + COSB=SQRT(1.0-(BREITE(J))*(BREITE(J))) + if(1.0-BREITE(J)*BREITE(J) .gt. 0 .OR. MGAUSS .NE. 1) then + IF(RLA0 .EQ. -90.0 .AND. J .EQ. MAXB .OR. & + RLA1 .EQ. 90.0 .AND. J .EQ. 1) then + UV(:,J,K)=UV(:,J,K)*1.D6 + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)*1.D6 + else + UV(:,J,K)=UV(:,J,K)/COSB + UV(:,J,MLEVEL+K)=UV(:,J,MLEVEL+K)/COSB + endif + else + HILFUV(5:MAXL,:)=0. + HILFUV(1:2,:)=0. + IF(J.EQ.MAXB) THEN +! Suedpol + HILFUV(3:4,1)=CUA(:,4,K) + HILFUV(3:4,2)=CVA(:,4,K) + ELSE +! Nordpol + HILFUV(3:4,1)=CUA(:,2,K) + HILFUV(3:4,2)=CVA(:,2,K) + ENDIF + CALL RFOURTR(HILFUV(:,1),WSAVE,IFAX,MAXL/2-1,MAXL,-1) + DO I=0,MAXL-1 + IF(MANF+I.LE.MAXL) THEN + UV(I+1,J,K)=HILFUV(MANF+I,1) + ELSE + UV(I+1,J,K)=HILFUV(MANF-MAXL+I,1) + ENDIF + ENDDO + CALL RFOURTR(HILFUV(:,2),WSAVE,IFAX,MAXL/2-1,MAXL,-1) + DO I=0,MAXL-1 + IF(MANF+I.LE.MAXL) THEN + UV(I+1,J,MLEVEL+K)=HILFUV(MANF+I,2) + ELSE + UV(I+1,J,MLEVEL+K)=HILFUV(MANF-MAXL+I,2) + ENDIF + ENDDO + endif + ENDDO + ENDDO + +! open output file + call grib_open_file(LUNIT,'fort.15','w') + +! 
we use temperature on lat/lon on model levels as template for model level data + LUNIT2=0 + call grib_open_file(LUNIT2,'fort.11','r') + call grib_new_from_file(LUNIT2,igrib(1), iret) + call grib_close_file(LUNIT2) + + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,UV(:,:,1),MAXL,MAXB,MLEVEL,MLEVELIST,1,(/131/)) + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,UV(:,:,MLEVEL+1),MAXL,MAXB,MLEVEL,MLEVELIST,1,(/132/)) + + IF(MDPDETA .ne. 1 .AND. MGAUSS .EQ. 0 .and. META .eq. 1) THEN + CALL WRITELATLON(LUNIT,igrib(1),ogrib,ETA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/77/)) + ELSE + CALL WRITELATLON(LUNIT,igrib(1),ogrib,ETA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/METAPAR/)) + ENDIF + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,T,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/130/)) + + CALL WRITELATLON(LUNIT,igrib(1),ogrib,PS,MAXL,MAXB,1,'1',1,(/134/)) + + call grib_set(igrib(1),"levelType","ml") + call grib_set(igrib(1),"typeOfLevel","hybrid") + CALL WRITELATLON(LUNIT,igrib(1),ogrib,QA,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/133/)) + + + IF(MOMEGA .EQ. 1) THEN + call grib_open_file(LUNIT2,'fort.25','w') + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OMR,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/135/)) + + IF(MOMEGADIFF .EQ. 1) THEN + + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,DPSDT,MAXL,MAXB,1,'1',1,(/158/)) + + OM=OM-OMR + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OM,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/001/)) + call grib_close_file(LUNIT2) + ENDIF + ENDIF + + IF(META .EQ. 1 .and. METADIFF .EQ. 1) THEN + call grib_open_file(LUNIT2,'fort.26','w') + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,ETAR,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/135/)) + +! IF(MOMEGADIFF .EQ. 1) THEN + + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,DPSDT,MAXL,MAXB,1,'1',1,(/158/)) + + OM=ETA-ETAR + CALL WRITELATLON(LUNIT2,igrib(1),ogrib,OM,MAXL,MAXB,MLEVEL,MLEVELIST,1,(/001/)) + call grib_close_file(LUNIT2) +! ENDIF + ENDIF + + + call grib_close_file(LUNIT) + + + + 2000 STOP 'SUCCESSFULLY FINISHED CONVERT_PRE: CONGRATULATIONS' + 3000 STOP 'ROUTINE CONVERT_PRE: ERROR' + 9999 stop 'ROUTINE CONVERT_PRE: ERROR' + END + + + +!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! + INTEGER FUNCTION IA (FIELD1,NI,NJ,NK,G) + + IMPLICIT NONE + INTEGER NI,NJ,NK,I,J,K + REAL FIELD1(NI,NJ,NK) + REAL G + REAL RMIN,RMAX,XMAX,A,A1,A2 + + RMAX=FIELD1(1,1,1) + RMIN=FIELD1(1,1,1) + + DO 100 K=1,NK + DO 100 J=1,NJ + DO 100 I=1,NI + IF (FIELD1(I,J,K).GT.RMAX)RMAX=FIELD1(I,J,K) + IF (FIELD1(I,J,K).LT.RMIN)RMIN=FIELD1(I,J,K) +100 CONTINUE + + IF (ABS(RMIN).GT.RMAX.OR.ABS(RMIN).EQ.RMAX) THEN + XMAX=ABS(RMIN) + ELSE + XMAX=RMAX + ENDIF + + IF (XMAX.EQ.0) THEN + IA = 0 + RETURN + ENDIF + + A1=LOG10 ((G/10.d0)/XMAX) + A2=LOG10 ( G/XMAX ) + IF(A1 .gt. A2) THEN + A=A2 + ELSE + A=A1 + ENDIF + + IF (A.GT.0) IA=INT(A) + IF (A.LT.0) IA=INT(A-1.0) + + RETURN + END + + SUBROUTINE STATIS (NI,NJ,NK,PHI,RMS,MW,SIG) + IMPLICIT REAL (A-H,O-Z) + + REAL PHI(NI,NJ,NK),SIG,MW,RMS,P + + N=NI*NJ*NK + + RMS=0. + MW=0. + + DO 10 I=1,NI + DO 10 J=1,NJ + DO 10 K=1,NK + P=PHI(I,J,K) + RMS=RMS+P*P + MW=MW+P +10 CONTINUE + + RMS=SQRT(RMS/N) + MW=MW/N + + IF(RMS*RMS-MW*MW.LT.0.) 
THEN + SIG=0.0 + ELSE + SIG=SQRT(RMS*RMS-MW*MW) + ENDIF + + RETURN + END + diff --git a/python/pythontest/TestInstallTar/test_untar/src/rwGRIB2.f90 b/python/pythontest/TestInstallTar/test_untar/src/rwGRIB2.f90 new file mode 100644 index 0000000000000000000000000000000000000000..09ec94e305507d30e37c43009d306ffd343cbe05 --- /dev/null +++ b/python/pythontest/TestInstallTar/test_untar/src/rwGRIB2.f90 @@ -0,0 +1,263 @@ + MODULE RWGRIB2 + + CONTAINS + + SUBROUTINE READLATLON(FILENAME,FELD,MAXL,MAXB,MLEVEL,MPAR) + + USE GRIB_API + + IMPLICIT NONE + + integer :: ifile + integer :: iret + integer :: n,mk,parid,nm + integer :: i,k + integer,dimension(:),allocatable :: igrib + integer :: numberOfPointsAlongAParallel + integer :: numberOfPointsAlongAMeridian + real, dimension(:), allocatable :: values + integer :: numberOfValues + real,dimension(maxl,maxb,mlevel) :: feld + integer:: maxl,maxb,mlevel,mstride,mpar(:),irest,div,level + integer :: l(size(mpar)) + character*(*):: filename + + call grib_open_file(ifile, TRIM(FILENAME),'r') + + ! count the messages in the file + call grib_count_in_file(ifile,n) + allocate(igrib(n)) + igrib=-1 + + ! Load the messages from the file. + DO i=1,n + call grib_new_from_file(ifile,igrib(i), iret) + END DO + + ! we can close the file + call grib_close_file(ifile) + + nm=size(mpar) + div=mlevel/nm + l=0 + + ! Loop on all the messages in memory + iloop: DO i=1,n +! write(*,*) 'processing message number ',i + ! get as a integer + call grib_get(igrib(i),'numberOfPointsAlongAParallel', & + numberOfPointsAlongAParallel) + + ! get as a integer + call grib_get(igrib(i),'numberOfPointsAlongAMeridian', & + numberOfPointsAlongAMeridian) + + call grib_get(igrib(i),'numberOfVerticalCoordinateValues',mk) + + call grib_get_size(igrib(i),'values',numberOfValues) +! write(*,*) 'numberOfValues=',numberOfValues + + allocate(values(numberOfValues), stat=iret) + ! get data values + call grib_get(igrib(i),'values',values) + + call grib_get(igrib(i),'paramId',parid) + call grib_get(igrib(i),'level',level) + + kloop: do k=1,nm + if(parid .eq. mpar(k)) then +! l(k)=l(k)+1 + feld(:,:,(k-1)*div+level)=reshape(values,(/maxl,maxb/)) +! print*,(k-1)*div+l(k),parid + exit kloop + endif + enddo kloop + if(k .gt. nm .and. parid .ne. mpar(nm)) then + write(*,*) k,nm,parid,mpar(nm) + write(*,*) 'ERROR readlatlon: parameter ',parid,'is not',mpar + stop + endif + +! print*,i + END DO iloop + write(*,*) 'readlatlon: ',i-1,' records read' + + DO i=1,n + call grib_release(igrib(i)) + END DO + + deallocate(values) + deallocate(igrib) + + END SUBROUTINE READLATLON + + SUBROUTINE WRITELATLON(iunit,igrib,ogrib,FELD,MAXL,MAXB,MLEVEL,& + MLEVELIST,MSTRIDE,MPAR) + + USE GRIB_API + + IMPLICIT NONE + + INTEGER IFIELD,MLEVEL,MNAUF,I,J,K,L,MSTRIDE,IERR,JOUT + INTEGER MPAR(MSTRIDE),MAXL,MAXB,LEVMIN,LEVMAX + INTEGER IUNIT,igrib,ogrib + REAL ZSEC4(MAXL*MAXB) + REAL FELD(MAXL,MAXB,MLEVEL) + CHARACTER*(*) MLEVELIST + INTEGER ILEVEL(MLEVEL),MLINDEX(MLEVEL+1),LLEN + + ! parse MLEVELIST + + LLEN=len(trim(MLEVELIST)) + if(index(MLEVELIST,'to') .ne. 0 .or. index(MLEVELIST,'TO') .ne. 0) THEN + i=index(MLEVELIST,'/') + read(MLEVELIST(1:i-1),*) LEVMIN + i=index(MLEVELIST,'/',.true.) + read(MLEVELIST(i+1:LLEN),*) LEVMAX + l=0 + do i=LEVMIN,LEVMAX + l=l+1 + ILEVEL(l)=i + enddo + else + l=1 + MLINDEX(1)=0 + do i=1,LLEN + if(MLEVELIST(i:i) .eq. 
'/') THEN + l=l+1 + MLINDEX(l)=i + endif + enddo + MLINDEX(l+1)=LLEN+1 + do i=1,l + read(MLEVELIST(MLINDEX(i)+1:MLINDEX(i+1)-1),*) ILEVEL(i) + enddo + endif + + DO k=1,l + call grib_set(igrib,"level",ILEVEL(k)) + DO j=1,MSTRIDE + call grib_set(igrib,"paramId",MPAR(j)) +! if(MPAR(j) .eq. 87) then +! call grib_set(igrib,"shortName","etadot") +! call grib_set(igrib,"units","Pa,s**-1") +! endif +! if(MPAR(j) .eq. 77) then +! call grib_set(igrib,"shortName","etadot") +! call grib_set(igrib,"units","s**-1") +! endif + if(l .ne. mlevel) then + zsec4(1:maxl*maxb)=RESHAPE(FELD(:,:,ILEVEL(k)),(/maxl*maxb/)) + else + zsec4(1:maxl*maxb)=RESHAPE(FELD(:,:,k),(/maxl*maxb/)) + endif + call grib_set(igrib,"values",zsec4) + + call grib_write(igrib,iunit) + + ENDDO + ENDDO + + + + END SUBROUTINE WRITELATLON + + SUBROUTINE READSPECTRAL(FILENAME,CXMN,MNAUF,MLEVEL,& + MAXLEV,MPAR,A,B) + + USE GRIB_API + + IMPLICIT NONE + + + integer :: ifile + integer :: iret + integer :: n,mk,div,nm,k + integer :: i,j,parid + integer,dimension(:),allocatable :: igrib + real, dimension(:), allocatable :: values + integer :: numberOfValues,maxlev + REAL :: A(MAXLEV+1),B(MAXLEV+1),pv(2*MAXLEV+2) + REAL:: CXMN(0:(MNAUF+1)*(MNAUF+2)-1,MLEVEL) +integer:: maxl,maxb,mlevel,mstride,mpar(:),mnauf,ioffset,ipar,ilev,l(size(mpar)) +character*(*):: filename + + call grib_open_file(ifile, TRIM(FILENAME),'r') + + ! count the messages in the file + call grib_count_in_file(ifile,n) + allocate(igrib(n)) + igrib=-1 + + ! Load the messages from the file. + DO i=1,n + call grib_new_from_file(ifile,igrib(i), iret) + END DO + + ! we can close the file + call grib_close_file(ifile) + + l=0 + ! Loop on all the messages in memory + iloop: DO i=1,n + ! write(*,*) 'processing message number ',i + ! get as a integer + call grib_get(igrib(i),'pentagonalResolutionParameterJ', j) + + call grib_get_size(igrib(i),'values',numberOfValues) + ! write(*,*) 'numberOfValues=',numberOfValues + + call grib_get(igrib(i),'numberOfVerticalCoordinateValues',mk) + + call grib_get(igrib(i),'level',ilev) + + + + call grib_get(igrib(i),'pv',pv) + + allocate(values(numberOfValues), stat=iret) + ! get data values + call grib_get(igrib(i),'values',values) + +! IOFFSET=mod(i-1,MSTRIDE)*(mk/2-1) +! CXMN(:,IOFFSET+ilev)=values(1:(MNAUF+1)*(MNAUF+2)) + + call grib_get(igrib(i),'paramId',parid) + nm=size(mpar) + div=mlevel/nm + kloop: do k=1,nm + if(parid .eq. mpar(k)) then + l(k)=l(k)+1 + cxmn(:,(k-1)*div+l(k))=values(1:(MNAUF+1)*(MNAUF+2)) +! print*,(k-1)*div+l(k),parid + exit kloop + endif + + enddo kloop + if(k .gt. nm .and. parid .ne. mpar(nm)) then + write(*,*) k,nm,parid,mpar(nm) + write(*,*) 'ERROR readspectral: parameter ',parid,'is not',mpar + stop + endif + +! 
print*,i + + END DO iloop + + write(*,*) 'readspectral: ',i-1,' records read' + + DO i=1,n + call grib_release(igrib(i)) + END DO + + deallocate(values) + deallocate(igrib) + + + + A=pv(1:1+MAXLEV) + B=pv(2+MAXLEV:2*MAXLEV+2) + + END SUBROUTINE READSPECTRAL + + END MODULE RWGRIB2 diff --git a/python/pythontest/testecmwfapi.py b/python/pythontest/testecmwfapi.py new file mode 100644 index 0000000000000000000000000000000000000000..90e49ee2e76f81a61dbdc25c1983c5d942b0f58b --- /dev/null +++ b/python/pythontest/testecmwfapi.py @@ -0,0 +1,16 @@ +#!/usr/bin/env python +from ecmwfapi import ECMWFDataServer + +server = ECMWFDataServer() + +server.retrieve({ + 'dataset' : "interim", + 'time' : "00", + 'date' : "2013-09-01/to/2013-09-30", + 'step' : "0", + 'type' : "an", + 'levtype' : "sfc", + 'param' : "165.128/41.128", + 'grid' : "0.75/0.75", + 'target' : "interim201309.grib" +}) diff --git a/python/submit.py b/python/submit.py index a041dedfa1e9fd99697c6ebc9e0f2e57be11ff3e..2cd757ef6cb4dfde0ce0b73305181e8c5082f429 100755 --- a/python/submit.py +++ b/python/submit.py @@ -15,6 +15,9 @@ # - applied PEP8 style guide # - added documentation # - minor changes in programming style (for consistence) +# - changed path names to variables from config file +# - added option for writing mars requests to extra file +# additionally,as option without submitting the mars jobs # # @License: # (C) Copyright 2014-2018. @@ -72,38 +75,37 @@ def main(): <nothing> ''' - called_from_dir = os.getcwd() - args = get_cmdline_arguments() try: c = ControlFile(args.controlfile) except IOError: - try: - c = ControlFile(_config.PATH_LOCAL_PYTHON + args.controlfile) - except IOError: - print 'Could not read CONTROL file "' + args.controlfile + '"' - print 'Either it does not exist or its syntax is wrong.' - print 'Try "' + sys.argv[0].split('/')[-1] + \ - ' -h" to print usage information' - sys.exit(1) - - env_parameter = read_ecenv(c.ecmwfdatadir + 'python/ECMWF_ENV') + print('Could not read CONTROL file "' + args.controlfile + '"') + print('Either it does not exist or its syntax is wrong.') + print('Try "' + sys.argv[0].split('/')[-1] + \ + ' -h" to print usage information') + sys.exit(1) + + env_parameter = read_ecenv(_config.PATH_ECMWF_ENV) c.assign_args_to_control(args) c.assign_envs_to_control(env_parameter) - c.check_conditions() + c.check_conditions(args.queue) # on local side - # on ECMWF server this would be the local side + # on ECMWF server this would also be the local side + called_from_dir = os.getcwd() if args.queue is None: if c.inputdir[0] != '/': c.inputdir = os.path.join(called_from_dir, c.inputdir) if c.outputdir[0] != '/': c.outputdir = os.path.join(called_from_dir, c.outputdir) get_mars_data(c) - prepare_flexpart(args.ppid, c) - normal_exit(c.mailfail, 'Done!') - # on ECMWF server + if c.request == 0 or c.request == 2: + prepare_flexpart(args.ppid, c) + normal_exit(c.mailfail, 'FLEX_EXTRACT IS DONE!') + else: + normal_exit(c.mailfail, 'PRINTING MARS_REQUESTS DONE!') + # send files to ECMWF server and install there else: submit(args.job_template, c, args.queue) @@ -116,7 +118,8 @@ def submit(jtemplate, c, queue): @Input: jtemplate: string - Job template file for submission to ECMWF. It contains all necessary + Job template file from sub-directory "_templates" for + submission to ECMWF. It contains all necessary module and variable settings for the ECMWF environment as well as the job call and mail report instructions. Default is "job.temp". 
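The docstring above refers to the job template; the hunk that follows shows how submit() uses it: the template is split at its 'EOF' marker line, the serialized CONTROL parameters are spliced in before that marker, and the result is written out as the job script. A minimal stand-alone sketch of that splice, assuming hypothetical file names 'job.temp' and 'job.ksh' and a stand-in parameter list (in submit.py these come from _config.PATH_TEMPLATES, _config.PATH_JOBSCRIPTS and ControlFile.to_list()):

    # splice CONTROL parameters into a job template at the 'EOF' marker
    with open('job.temp') as f:                  # hypothetical template file
        template_lines = f.read().split('\n')

    insert_point = template_lines.index('EOF')   # marker where CONTROL lines belong
    control_lines = ['DTIME 3', 'CLASS EI']      # stand-in for c.to_list()

    job_lines = (template_lines[:insert_point]
                 + control_lines
                 + template_lines[insert_point:])

    with open('job.ksh', 'w') as f:              # hypothetical job script name
        f.write('\n'.join(job_lines))
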
@@ -142,17 +145,18 @@ def submit(jtemplate, c, queue): ''' # read template file and get index for CONTROL input - with open(jtemplate) as f: + with open(os.path.join(_config.PATH_TEMPLATES, jtemplate)) as f: lftext = f.read().split('\n') insert_point = lftext.index('EOF') if not c.basetime: # --------- create on demand job script ------------------------------------ if c.maxstep > 24: - print '---- Pure forecast mode! ----' + print('---- Pure forecast mode! ----') else: - print '---- On-demand mode! ----' - job_file = jtemplate[:-4] + 'ksh' + print('---- On-demand mode! ----') + job_file = os.path.join(_config.PATH_JOBSCRIPTS, + jtemplate[:-4] + 'ksh') clist = c.to_list() lftextondemand = lftext[:insert_point] + clist + lftext[insert_point:] @@ -160,13 +164,13 @@ def submit(jtemplate, c, queue): with open(job_file, 'w') as f: f.write('\n'.join(lftextondemand)) - result_code = submit_job_to_ecserver(queue, job_file) + submit_job_to_ecserver(queue, job_file) else: # --------- create operational job script ---------------------------------- - print '---- Operational mode! ----' - job_file = jtemplate[:-5] + 'oper.ksh' - #colist = [] + print('---- Operational mode! ----') + job_file = os.path.join(_config.PATH_JOBSCRIPTS, + jtemplate[:-5] + 'oper.ksh') if c.maxstep: mt = int(c.maxstep) @@ -186,10 +190,10 @@ def submit(jtemplate, c, queue): with open(job_file, 'w') as f: f.write('\n'.join(lftextoper)) - result_code = submit_job_to_ecserver(queue, job_file) + submit_job_to_ecserver(queue, job_file) # -------------------------------------------------------------------------- - print 'You should get an email with subject flex.hostname.pid' + print('You should get an email with subject flex.hostname.pid') return diff --git a/python/tools.py b/python/tools.py index a432e64d2ec487286a52be76bf69f3aaa3c723d0..b2cd1dc33dfcadd2dbf20a908998b87d12a2472a 100644 --- a/python/tools.py +++ b/python/tools.py @@ -123,6 +123,9 @@ def get_cmdline_arguments(): help="file with CONTROL parameters") parser.add_argument("--debug", dest="debug", default=None, help="debug mode - leave temporary files intact") + parser.add_argument("--request", dest="request", default=None, + help="list all mars request in file mars_requests.dat \ + and skip submission to mars") args = parser.parse_args() @@ -136,13 +139,15 @@ def read_ecenv(filename): @Input: filename: string - Name of file where the ECMWV environment parameters are stored. + Path to file where the ECMWV environment parameters are stored. @Return: envs: dict + Contains the environment parameter ecuid, ecgid, gateway + and destination for ECMWF server environments. 
''' envs= {} - print filename + with open(filename, 'r') as f: for line in f: data = line.strip().split() @@ -174,7 +179,7 @@ def clean_up(c): <nothing> ''' - print "clean_up" + print("clean_up") cleanlist = glob.glob(c.inputdir + "/*") for clist in cleanlist: @@ -183,7 +188,7 @@ def clean_up(c): if c.ecapi is False and (c.ectrans == '1' or c.ecstorage == '1'): silent_remove(clist) - print "Done" + print("Done") return @@ -207,7 +212,7 @@ def my_error(users, message='ERROR'): <nothing> ''' - print message + print(message) # comment if user does not want email notification directly from python for user in users: @@ -223,11 +228,11 @@ def my_error(users, message='ERROR'): trace = '\n'.join(traceback.format_stack()) pout = p.communicate(input=message + '\n\n' + trace)[0] except ValueError as e: - print 'ERROR: ', e + print('ERROR: ', e) sys.exit('Email could not be sent!') else: - print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ - pout.decode() + print('Email sent to ' + os.path.expandvars(user) + ' ' + + pout.decode()) sys.exit(1) @@ -252,7 +257,7 @@ def normal_exit(users, message='Done!'): <nothing> ''' - print message + print(message) # comment if user does not want notification directly from python for user in users: @@ -267,11 +272,11 @@ def normal_exit(users, message='Done!'): bufsize=1) pout = p.communicate(input=message+'\n\n')[0] except ValueError as e: - print 'ERROR: ', e - print 'Email could not be sent!' + print('ERROR: ', e) + print('Email could not be sent!') else: - print 'Email sent to ' + os.path.expandvars(user) + ' ' + \ - pout.decode() + print('Email sent to ' + os.path.expandvars(user) + ' ' + + pout.decode()) return @@ -391,7 +396,7 @@ def to_param_id(pars, table): ipar.append(int(k)) break else: - print 'Warning: par ' + par + ' not found in table 128' + print('Warning: par ' + par + ' not found in table 128') return ipar @@ -437,7 +442,7 @@ def make_dir(directory): # errno.EEXIST = directory already exists raise # re-raise exception if a different error occured else: - print 'WARNING: Directory {0} already exists!'.format(directory) + print('WARNING: Directory {0} already exists!'.format(directory)) return @@ -481,11 +486,10 @@ def put_file_to_ecserver(ecd, filename, target, ecuid, ecgid): '/' + filename], stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: - print '... ERROR CODE:\n ... ', e.returncode - print '... ERROR MESSAGE:\n ... ', e - print '... COMMAND MESSAGE:\n ...', e.output + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) - print '\nDo you have a valid eccert key?' + print('\n... Do you have a valid ecaccess certification key?') sys.exit('... ECACCESS-FILE-PUT FAILED!') return rcode @@ -519,11 +523,11 @@ def submit_job_to_ecserver(target, jobname): '-queueName', target, jobname]) except subprocess.CalledProcessError as e: - print '... ERROR CODE: ', e.returncode - print '... ERROR MESSAGE:\n ... ', e - print '... COMMAND MESSAGE:\n ...', e.output + print('... ERROR CODE:\n ... ' + str(e.returncode)) + print('... ERROR MESSAGE:\n ... ' + str(e)) + - print '\nDo you have a valid eccert key?' + print('\n... Do you have a valid ecaccess certification key?') sys.exit('... 
ECACCESS-JOB-SUBMIT FAILED!') return rcode diff --git a/python/CONTROL.temp b/run/control/CONTROL.temp similarity index 100% rename from python/CONTROL.temp rename to run/control/CONTROL.temp diff --git a/run/control/CONTROL.temp.backup b/run/control/CONTROL.temp.backup new file mode 100644 index 0000000000000000000000000000000000000000..2ee99922380e222e1239f1f09ce7dcdd17ed2fb8 --- /dev/null +++ b/run/control/CONTROL.temp.backup @@ -0,0 +1,37 @@ +DAY1 +DAY2 +DTIME 3 +TYPE AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC +TIME 00 00 00 00 00 00 06 00 00 00 00 00 12 12 12 12 12 12 18 12 12 12 12 12 +STEP 00 01 02 03 04 05 00 07 08 09 10 11 00 01 02 03 04 05 00 07 08 09 10 11 +CLASS EI +STREAM OPER +NUMBER OFF +EXPVER 1 +GRID 5000 +LEFT -175000 +LOWER -90000 +UPPER 90000 +RIGHT 180000 +LEVEL 60 +LEVELIST 1/to/60 +RESOL 63 +GAUSS 1 +ACCURACY 16 +OMEGA 0 +OMEGADIFF 0 +ETA 0 +ETADIFF 0 +DPDETA 1 +SMOOTH 0 +FORMAT GRIB1 +ADDPAR 186/187/188/235/139/39 +PREFIX EN +ECSTORAGE 1 +ECTRANS 0 +ECFSDIR ectmp:/${USER}/econdemand/ +MAILFAIL ${USER} +MAILOPS ${USER} +GRIB2FLEXPART 0 +EOF + diff --git a/run/control/CONTROL.test b/run/control/CONTROL.test new file mode 100644 index 0000000000000000000000000000000000000000..b693ee4d1364bc38deb58739b3c8844f4ee5490c --- /dev/null +++ b/run/control/CONTROL.test @@ -0,0 +1,18 @@ +DTIME 3 +TYPE AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC AN FC FC FC FC FC +TIME 00 00 00 00 00 00 06 00 00 00 00 00 12 12 12 12 12 12 18 12 12 12 12 12 +STEP 00 01 02 03 04 05 00 07 08 09 10 11 00 01 02 03 04 05 00 07 08 09 10 11 +CLASS EI +STREAM OPER +EXPVER 1 +GRID 5000 +LEFT -10000 +LOWER 30000 +UPPER 40000 +RIGHT 10000 +LEVELIST 59/to/60 +RESOL 63 +GAUSS 1 +ADDPAR 186/187/188/235/139/39 +PREFIX EItest_ +ECTRANS 1 diff --git a/python/CONTROL.worktest b/run/control/CONTROL.worktest similarity index 100% rename from python/CONTROL.worktest rename to run/control/CONTROL.worktest diff --git a/python/CONTROL_CV b/run/control/CONTROL_CV similarity index 100% rename from python/CONTROL_CV rename to run/control/CONTROL_CV diff --git a/python/CONTROL_CV_PREOP b/run/control/CONTROL_CV_PREOP similarity index 100% rename from python/CONTROL_CV_PREOP rename to run/control/CONTROL_CV_PREOP diff --git a/python/CONTROL_DELIA_HR b/run/control/CONTROL_DELIA_HR similarity index 100% rename from python/CONTROL_DELIA_HR rename to run/control/CONTROL_DELIA_HR diff --git a/python/CONTROL_DELIA_LR b/run/control/CONTROL_DELIA_LR similarity index 100% rename from python/CONTROL_DELIA_LR rename to run/control/CONTROL_DELIA_LR diff --git a/python/CONTROL_EI b/run/control/CONTROL_EI similarity index 100% rename from python/CONTROL_EI rename to run/control/CONTROL_EI diff --git a/python/CONTROL_ERA_HAIYAN b/run/control/CONTROL_ERA_HAIYAN similarity index 100% rename from python/CONTROL_ERA_HAIYAN rename to run/control/CONTROL_ERA_HAIYAN diff --git a/python/CONTROL_ERA__GLOBALETA b/run/control/CONTROL_ERA__GLOBALETA similarity index 100% rename from python/CONTROL_ERA__GLOBALETA rename to run/control/CONTROL_ERA__GLOBALETA diff --git a/python/CONTROL_ERA__HIRES b/run/control/CONTROL_ERA__HIRES similarity index 100% rename from python/CONTROL_ERA__HIRES rename to run/control/CONTROL_ERA__HIRES diff --git a/python/CONTROL_FC b/run/control/CONTROL_FC similarity index 100% rename from python/CONTROL_FC rename to run/control/CONTROL_FC diff --git a/python/CONTROL_FC12 b/run/control/CONTROL_FC12 similarity index 100% rename from python/CONTROL_FC12 rename to run/control/CONTROL_FC12 diff 
--git a/python/CONTROL_HIRES b/run/control/CONTROL_HIRES similarity index 100% rename from python/CONTROL_HIRES rename to run/control/CONTROL_HIRES diff --git a/python/CONTROL_HIRESGAUSS b/run/control/CONTROL_HIRESGAUSS similarity index 100% rename from python/CONTROL_HIRESGAUSS rename to run/control/CONTROL_HIRESGAUSS diff --git a/python/CONTROL_OD b/run/control/CONTROL_OD similarity index 100% rename from python/CONTROL_OD rename to run/control/CONTROL_OD diff --git a/python/CONTROL_OPS_V4.temp b/run/control/CONTROL_OPS_V4.temp similarity index 100% rename from python/CONTROL_OPS_V4.temp rename to run/control/CONTROL_OPS_V4.temp diff --git a/python/CONTROL_OPS_V6.0 b/run/control/CONTROL_OPS_V6.0 similarity index 100% rename from python/CONTROL_OPS_V6.0 rename to run/control/CONTROL_OPS_V6.0 diff --git a/python/CONTROL_OPS_V6.0_4V.temp b/run/control/CONTROL_OPS_V6.0_4V.temp similarity index 100% rename from python/CONTROL_OPS_V6.0_4V.temp rename to run/control/CONTROL_OPS_V6.0_4V.temp diff --git a/python/CONTROL_PF.temp b/run/control/CONTROL_PF.temp similarity index 100% rename from python/CONTROL_PF.temp rename to run/control/CONTROL_PF.temp diff --git a/python/compilejob.ksh b/run/jobscripts/compilejob.ksh similarity index 100% rename from python/compilejob.ksh rename to run/jobscripts/compilejob.ksh diff --git a/python/job.ksh b/run/jobscripts/job.ksh similarity index 93% rename from python/job.ksh rename to run/jobscripts/job.ksh index ae3bec5d9abcc16bde9ed3d42bb355242bdb4599..6373f11b6eb6424da55426a44f88987b531b477a 100644 --- a/python/job.ksh +++ b/run/jobscripts/job.ksh @@ -58,7 +58,7 @@ basetime None controlfile CONTROL.test cwc 0 date_chunk 3 -debug 0 +debug 1 destination annep@genericSftp dpdeta 1 dtime 3 @@ -67,7 +67,7 @@ ecgid at ecstorage 0 ectrans 1 ecuid km4a -end_date 20100606 +end_date 20160606 eta 0 etadiff 0 etapar 77 @@ -83,10 +83,11 @@ job_template job.temp left -10000 level 60 levelist 59/to/60 +logicals gauss omega omegadiff eta etadiff dpdeta cwc wrf grib2flexpart ecstorage ectrans debug request lower 30000 mailfail ${USER} mailops ${USER} -makefile None +makefile Makefile.gfortran marsclass EI maxstep 11 number OFF @@ -95,10 +96,11 @@ omegadiff 0 outputdir ../work prefix EItest_ queue ecgate +request 0 resol 63 right 10000 smooth 0 -start_date 20100606 +start_date 20160606 step 00 01 02 03 04 05 00 07 08 09 10 11 00 01 02 03 04 05 00 07 08 09 10 11 stream OPER time 00 00 00 00 00 00 06 00 00 00 00 00 12 12 12 12 12 12 18 12 12 12 12 12 diff --git a/python/job.temp.o b/run/jobscripts/job.temp similarity index 90% rename from python/job.temp.o rename to run/jobscripts/job.temp index fbdbd81aa1ea7100a84ac0858ed29804526e98a5..e8ce37658f0f12745bee16445498904c464fe929 100644 --- a/python/job.temp.o +++ b/run/jobscripts/job.temp @@ -4,7 +4,7 @@ # start with ecaccess-job-submit -queueName ecgb NAME_OF_THIS_FILE on gateway server # start with sbatch NAME_OF_THIS_FILE directly on machine -#SBATCH --workdir=/scratch/ms/spatlh00/lh0 +#SBATCH --workdir=/scratch/ms/at/km4a #SBATCH --qos=normal #SBATCH --job-name=flex_ecmwf #SBATCH --output=flex_ecmwf.%j.out @@ -32,7 +32,7 @@ case $HOST in module unload emos module load grib_api/1.14.5 module load emos/437-r64 - export PATH=${PATH}: + export PATH=${PATH}:${HOME}/flex_extract_v7.1/python ;; *cca*) module switch PrgEnv-cray PrgEnv-intel @@ -40,7 +40,7 @@ case $HOST in module load emos module load python export SCRATCH=$TMPDIR - export PATH=${PATH}: + export PATH=${PATH}:${HOME}/flex_extract_v7.1/python ;; esac @@ -73,3 +73,4 @@ 
else l=$(($l+1)) done fi + diff --git a/run/mars_requests.dat b/run/mars_requests.dat new file mode 100644 index 0000000000000000000000000000000000000000..12a9af3ce77403ee204420f9242df8cc203ad06b --- /dev/null +++ b/run/mars_requests.dat @@ -0,0 +1,252 @@ +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: SFC +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCOG_acc_SL.20170808.41511.90849.grb +step: 3/to/12/by/3 +grid: 5.0/5.0 +param: LSP/CP/SSHF/EWSS/NSSS/SSR +time: 00/12 +date: 20170808/to/20170810 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 59/to/60 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCOG__ML.20170809.41511.90849.grb +step: 03/09 +grid: 5.0/5.0 +param: T/Q +time: 00/12 +date: 20170809/to/20170809 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: SFC +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/OG_OROLSM__SL.20170809.41511.90849.grb +step: 000 +grid: 5.0/5.0 +param: 160/27/28/173 +time: 00 +date: 20170809 +resol: 63 +type: AN +accuracy: 24 + + +mars +gaussian: reduced +stream: OPER +levelist: 1 +area: +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCGG__SL.20170809.41511.90849.grb +step: 03/09 +grid: 32 +param: Q +time: 00/12 +date: 20170809/to/20170809 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1/to/60 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCGG__ML.20170809.41511.90849.grb +step: 03/09 +grid: 32 +param: U/V/D/77 +time: 00/12 +date: 20170809/to/20170809 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCSH__SL.20170809.41511.90849.grb +step: 03/09 +grid: OFF +param: LNSP +time: 00/12 +date: 20170809/to/20170809 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: SFC +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/FCOG__SL.20170809.41511.90849.grb +step: 03/09 +grid: 5.0/5.0 +param: 141/151/164/165/166/167/168/129/172/186/187/188/235/139/39 +time: 00/12 +date: 20170809/to/20170809 +resol: 63 +type: FC +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 59/to/60 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/ANOG__ML.20170809.41511.90849.grb +step: 00 +grid: 5.0/5.0 +param: T/Q +time: 00/06/12/18 +date: 20170809/to/20170809 +resol: 63 +type: AN +accuracy: 24 + + +mars +gaussian: reduced +stream: OPER +levelist: 1 +area: +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: 
/raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/ANGG__SL.20170809.41511.90849.grb +step: 00 +grid: 32 +param: Q +time: 00/06/12/18 +date: 20170809/to/20170809 +resol: 63 +type: AN +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1/to/60 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/ANGG__ML.20170809.41511.90849.grb +step: 00 +grid: 32 +param: U/V/D/77 +time: 00/06/12/18 +date: 20170809/to/20170809 +resol: 63 +type: AN +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: ML +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/ANSH__SL.20170809.41511.90849.grb +step: 00 +grid: OFF +param: LNSP +time: 00/06/12/18 +date: 20170809/to/20170809 +resol: 63 +type: AN +accuracy: 24 + + +mars +gaussian: +stream: OPER +levelist: 1 +area: 40.0/-10.0/30.0/10.0 +marsclass: EI +levtype: SFC +number: OFF +expver: 1 +repres: +target: /raid60/nas/tmc/Anne/Interpolation/flexextract/flex_extract_v7.1/python/../work/ANOG__SL.20170809.41511.90849.grb +step: 00 +grid: 5.0/5.0 +param: 141/151/164/165/166/167/168/129/172/186/187/188/235/139/39 +time: 00/06/12/18 +date: 20170809/to/20170809 +resol: 63 +type: AN +accuracy: 24 + + diff --git a/run/run.sh b/run/run.sh new file mode 100644 index 0000000000000000000000000000000000000000..afabaa83e75e9f27f62cd6df4682380ead0e30d6 --- /dev/null +++ b/run/run.sh @@ -0,0 +1,4 @@ +#!/bin/bash + +pyscript = ../python/submit.py + diff --git a/setup.sh b/setup.sh new file mode 100755 index 0000000000000000000000000000000000000000..0446f0a7d519c216e0da5409ce051ffe556db137 --- /dev/null +++ b/setup.sh @@ -0,0 +1,77 @@ +#!/bin/bash +# +# @Author: Anne Philipp +# +# @Date: September, 10 2018 +# +# @Description: +# + +# ----------------------------------------------------------------- +# AVAILABLE COMMANDLINE ARGUMENTS TO SET +# +# THE USER HAS TO SPECIFY THESE PARAMETER +# +TARGET='ecgate' +MAKEFILE='Makefile.gfortran' +ECUID='km4a' +ECGID='at' +GATEWAY='srvx8.img.univie.ac.at' +DESTINATION='annep@genericSftp' +FLEXPART_ROOT_SCRIPTS="" +JOB_TEMPLATE="" +CONTROLFILE="CONTROL.test" +# ----------------------------------------------------------------- +# +# AFTER THIS LINE THE USER DOES NOT HAVE TO CHANGE ANYTHING !!! +# +# ----------------------------------------------------------------- + +# PATH TO INSTALLATION SCRIPT +script="python/install.py" + +# INITIALIZE EMPTY PARAMETERLIST +parameterlist="" + +# DEFAULT PARAMETERLIST +if [ -n "$TARGET" ]; then + parameterlist=" --target=$TARGET" +else + echo "ERROR: No installation target specified." + echo "EXIT WITH ERROR" + exit +fi + +# CHECK FOR MORE PARAMETER +if [ "$TARGET" == "ecgate" ] || [ "$TARGET" == "cca" ]; then + # check if necessary Parameters are set + if [ -z "$ECUID" ] || [ -z "$ECGID" ] || [ -z "$GATEWAY" ] || [ -z "$DESTINATION" ]; then + echo "ERROR: At least one of the following parameters are not set: ECUID, ECGID, GATEWAY, DESTINATION!" 
+        echo "EXIT WITH ERROR"
+        exit
+    else
+        parameterlist+=" --ecuid=$ECUID --ecgid=$ECGID --gateway=$GATEWAY --destination=$DESTINATION"
+    fi
+fi
+if [ -n "$MAKEFILE" ]; then
+    parameterlist+=" --makefile=$MAKEFILE"
+fi
+if [ -n "$FLEXPART_ROOT_SCRIPTS" ]; then # not empty
+    parameterlist+=" --flexpart_root_scripts=$FLEXPART_ROOT_SCRIPTS"
+fi
+if [ -n "$JOB_TEMPLATE" ]; then
+    parameterlist+=" --job_template=$JOB_TEMPLATE"
+fi
+if [ -n "$CONTROLFILE" ]; then
+    parameterlist+=" --controlfile=$CONTROLFILE"
+fi
+
+# -----------------------------------------------------------------
+# CALL INSTALLATION SCRIPT WITH DETERMINED COMMANDLINE ARGUMENTS
+
+$script $parameterlist
+
+# -----------------------------------------------------------------
+# CALL INSTALLATION TESTS
+
+# TODO !!!!
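
The new run/run.sh above is still a stub: it assigns pyscript but never invokes it, and the spaces around "=" would make the assignment fail in bash. Below is a minimal sketch of what the wrapper could look like, assuming submit.py parses its command line with tools.get_cmdline_arguments() and therefore accepts the --controlfile and --request options added in this patch; the CONTROL file name is only an example.

#!/bin/bash
# Sketch, not part of the patch: wrapper around the python extraction script.
pyscript=../python/submit.py        # no spaces around "=" in bash assignments

# dry run: only write the MARS requests to run/mars_requests.dat and
# skip the submission to MARS (see the --request help text in tools.py)
python $pyscript --controlfile=CONTROL.test --request=1

# full extraction with the same CONTROL file
python $pyscript --controlfile=CONTROL.test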
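
For orientation, with the default values defined at the top of setup.sh, the "$script $parameterlist" call assembled above expands to the following command line (the user-specific values are simply the examples from this patch):

python/install.py --target=ecgate --ecuid=km4a --ecgid=at \
    --gateway=srvx8.img.univie.ac.at --destination=annep@genericSftp \
    --makefile=Makefile.gfortran --controlfile=CONTROL.test

FLEXPART_ROOT_SCRIPTS and JOB_TEMPLATE are empty in the defaults, so no --flexpart_root_scripts or --job_template argument is appended.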