# -*- coding: iso-8859-15 -*-
# Generic program to transform ASCII observational data in columns to a netCDF file
# L. Fita, LMD February 2015
## e.g. # create_OBSnetcdf.py -c '#' -d ACAR/description.dat -e space -f ACAR/2012/10/ACAR_121018.asc -g true -t 19491201000000,seconds -k trajectory
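## In the example above: '-c' comment characters, '-d' description file, '-e' end of
##   column character, '-f' ASCII observations file, '-g' debug, '-t' CF reference
##   date and time units, '-k' kind of observations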

import numpy as np
from netCDF4 import Dataset as NetCDFFile
import os
import re
from optparse import OptionParser

# version
version=1.1

# Filling values for floats, integer and string
fillValueF = 1.e20
fillValueI = -99999
fillValueS = '---'

# Length of the string variables
StringLength = 200

# Size of the map for the complementary variables/maps
Ndim2D = 100

main = 'create_OBSnetcdf.py'
errormsg = 'ERROR -- error -- ERROR -- error'
warnmsg = 'WARNING -- warning -- WARNING -- warning'

fillValue = 1.e20

def searchInlist(listname, nameFind):
    """ Function to search a value within a list
    listname = list
    nameFind = value to find
    >>> searchInlist(['1', '2', '3', '5'], '5')
    True
    """
    for x in listname:
      if x == nameFind:
        return True
    return False

def set_attribute(ncvar, attrname, attrvalue):
    """ Sets a value of an attribute of a netCDF variable. Removes previous attribute value if exists
    ncvar = object netcdf variable
    attrname = name of the attribute
    attrvalue = value of the attribute
    """
    import numpy as np
    from netCDF4 import Dataset as NetCDFFile

    attvar = ncvar.ncattrs()
    if searchInlist(attvar, attrname):
        attr = ncvar.delncattr(attrname)

    attr = ncvar.setncattr(attrname, attrvalue)

    return ncvar

def basicvardef(varobj, vstname, vlname, vunits):
    """ Function to give the basic attributes to a variable
    varobj= netCDF variable object
    vstname= standard name of the variable
    vlname= long name of the variable
    vunits= units of the variable
    """
    attr = varobj.setncattr('standard_name', vstname)
    attr = varobj.setncattr('long_name', vlname)
    attr = varobj.setncattr('units', vunits)

    return

def remove_NONascii(string):
    """ Function to remove that characters which are not in the standard 127 ASCII
      string= string to transform
    >>> remove_NONascii('Lluís')
    Lluis
    """
    fname = 'remove_NONascii'

    newstring = string

    RTFchar= ['á', 'é', 'í', 'ó', 'ú', 'à', 'è', 'ì', 'ò', 'ù', 'â', 'ê', 'î', 'ô',  \
      'û', 'ä', 'ë', 'ï', 'ö', 'ü', 'ç', 'ñ','æ', 'œ', 'Á', 'É', 'Í', 'Ó', 'Ú', 'À', \
      'È', 'Ì', 'Ò', 'Ù', 'Â', 'Ê', 'Î', 'Ô', 'Û', 'Ä', 'Ë', 'Ï', 'Ö', 'Ü', 'Ç', 'Ñ',\
      'Æ', 'Œ', '\n', '\t']
    ASCchar= ['a', 'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o',  \
      'u', 'a', 'e', 'i', 'o', 'u', 'c', 'n','ae', 'oe', 'A', 'E', 'I', 'O', 'U', 'A', \
      'E', 'I', 'O', 'U', 'A', 'E', 'I', 'O', 'U', 'A', 'E', 'I', 'O', 'U', 'C', 'N',\
      'AE', 'OE', '', ' ']

    Nchars = len(RTFchar)
    for ichar in range(Nchars):
        foundchar = string.find(RTFchar[ichar])
        if foundchar != -1:
            newstring = newstring.replace(RTFchar[ichar], ASCchar[ichar])

    return newstring

def read_description(fdobs, dbg):
    """ reads the description file of the observational data-set 
    fdobs= descriptive observational data-set
    dbg= boolean argument for debugging
    * Each station should have a 'description.dat' file with:
      institution=Institution who creates the data
      department=Department within the institution
      scientists=names of the data producers
      contact=contact of the data producers
      description=description of the observations
      acknowledgement=sentence of acknowledgement
      comment=comment for the measurements

      MissingValue='|' list of ASCII values for missing values within the data 
        (as they appear!)
      comment=comments

      varN='|' list of variable names
      varLN='|' list of long variable names
      varU='|' list units of the variables
      varBUFR='|' list BUFR code of the variables
      varTYPE='|' list of variable types ('D', 'F', 'I', 'I64', 'S')

      NAMElon=name of the variable with the longitude (x position)
      NAMElat=name of the variable with the latitude (y position)
      NAMEheight=ind_alt
      NAMEtime=name of the variable with the time
      FMTtime=format of the time (as in 'C', 'CFtime' for already CF-like time)

    """
    fname = 'read_description'

    descobj = open(fdobs, 'r')
    desc = {}

    namevalues = []

    for line in descobj:
        if line[0:1] != '#' and len(line) > 1 :
            descn = remove_NONascii(line.split('=')[0])
            descv = remove_NONascii(line.split('=')[1])
            namevalues.append(descn)
            if descn[0:3] == 'var':
                desc[descn] = descv.split('|')
            elif descn == 'MissingValue':
                desc[descn] = []
                for dn in descv.split('|'): desc[descn].append(dn)
                print '  ' + fname + ': missing values found:',desc[descn]
            else:
# 'NAME*', 'FMT*' and the remaining single-value descriptors are kept as given
                desc[descn] = descv

    if dbg:
        Nvars = len(desc['varN'])
        print '  ' + fname + ": description content of '" + fdobs + "'______________"
        for varn in namevalues:
            if varn[0:3] != 'var':
                print '    ' + varn + ':',desc[varn]
            elif varn == 'varN':
                print '    * Variables:'
                for ivar in range(Nvars):
                    varname = desc['varN'][ivar]
                    varLname = desc['varLN'][ivar]
                    varunits = desc['varU'][ivar]
                    if desc.has_key('varBUFR'): 
                        varbufr = desc['varBUFR'][ivar]
                        print '      ', ivar, varname + ':',varLname,'[',varunits,   \
                          ']','bufr code:',varbufr
                    else:
                        print '      ', ivar, varname + ':',varLname,'[',varunits,']'

    descobj.close()

    return desc

def value_fmt(val, miss, fmt):
    """ Function to transform an ASCII value to a given format
      val= value to transform
      miss= list of possible missing values
      fmt= format to take:
        'D': float double precision
        'F': float
        'I': integer
        'I64': 64-bits integer
        'S': string
    >>> value_fmt('9876.12', ['-999'], 'F')
    9876.12
    """

    fname = 'value_fmt'

    fmts = ['D', 'F', 'I', 'I64', 'S']
    Nfmts = len(fmts)

    if not searchInlist(miss,val):
        if not searchInlist(fmts, fmt):
            print errormsg
            print '  ' + fname + ": format '" + fmt + "' not ready !!"
            quit(-1)
        else:
            if fmt == 'D':
                newval = np.float32(val)
            elif fmt == 'F':
                newval = np.float(val)
            elif fmt == 'I':
                newval = int(val)
            elif fmt == 'I64':
                newval = np.int64(val)
            elif fmt == 'S':
                newval = val
    else:
        newval = None

    return newval

def read_datavalues(dataf, comchar, colchar, fmt, miss, varns, dbg):
    """ Function to read from an ASCII file values in column
      dataf= data file
      comchar= list of the characters indicating comment in the file
      colchar= character which indicate end of value in the column
      dbg= debug mode or not
      fmt= list of kind of values to be found
      miss= list of possible missing values
      varns= list of name of the variables to find
    """

    fname = 'read_datavalues'

    ofile = open(dataf, 'r')
    Nvals = len(fmt)

    finalvalues = {}

    iline = 0
    for line in ofile:
        line = line.replace('\n','').replace(chr(13),'')
        if not searchInlist(comchar,line[0:1]) and len(line) > 1:
            values0 = line.split(colchar)
# Removing no-value columns
            values = []
            for iv in values0:
                if len(iv) > 0: values.append(iv)

            Nvalues = len(values)
# Checks for weird characters at the end of lines (uncomment to check)
#            if values[Nvalues-1][0:4] == '-999':
#                print line,'last v:',values[Nvalues-1],'len:',len(values[Nvalues-1])
#                for ic in range(len(values[Nvalues-1])):
#                    print ic,ord(values[Nvalues-1][ic:ic+1])
#                quit()

            if len(values[Nvalues-1]) == 0:
                Nvalues = Nvalues - 1
                
            if Nvalues != Nvals:
                print warnmsg
                print '  ' + fname + ': number of formats:',Nvals,' and number of ', \
                  'values:',Nvalues,' with split character *' + colchar +            \
                  '* do not coincide!!'
                print '    * what is found is ________'
                if Nvalues > Nvals:
                    Nshow = Nvals
                    for ivar in range(Nshow):
                        print '    ',varns[ivar],'fmt:',fmt[ivar],'value:',values[ivar]
                    print '      missing formats for:',values[Nshow:Nvalues+1]
                    print '      values not considered, continue'
                else:
                    Nshow = Nvalues
                    for ivar in range(Nshow):
                        print '   ',varns[ivar],'fmt:',fmt[ivar],'value:',values[ivar]
                    print '      excess of formats:',fmt[Nshow:Nvals+1]
                    quit(-1)

# Reading and transforming values
            if dbg: print '  ' + fname + ': values found _________'

            for ivar in range(Nvals):
                if dbg: 
                    print iline, varns[ivar],'value:',values[ivar],miss,fmt[ivar]

                if iline == 0:
                    listvals = []
                    listvals.append(value_fmt(values[ivar], miss, fmt[ivar]))
                    finalvalues[varns[ivar]] = listvals
                else:
                    listvals = finalvalues[varns[ivar]]
                    listvals.append(value_fmt(values[ivar], miss, fmt[ivar]))
                    finalvalues[varns[ivar]] = listvals
        else:
# First line without values
            if iline == 0: iline = -1
    
        iline = iline + 1

    ofile.close()

    return finalvalues

def writing_str_nc(varo, values, Lchar):
    """ Function to write string values in a netCDF variable as a chain of 1char values
    varo= netCDF variable object
    values = list of values to introduce
    Lchar = length of the string in the netCDF file
    """

    Nvals = len(values)
    for iv in range(Nvals):
        stringv = values[iv]
# Missing string values (None) are written with the string filling value
        if stringv is None: stringv = fillValueS
        charvals = np.chararray(Lchar)
# Strings longer than the available length are truncated
        Lstr = min(len(stringv), Lchar)
        charvals[Lstr:Lchar] = ''

        for ich in range(Lstr):
            charvals[ich] = stringv[ich:ich+1]

        varo[iv,:] = charvals

    return
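# For instance (a hypothetical sketch; 'onewvar' stands for an already created
#   netCDF variable over the ('time','StrLength') dimensions), calling
#     writing_str_nc(onewvar, ['ACAR', 'sounding'], StringLength)
#   would store each string as a row of single characters padded with empty values.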

def Stringtimes_CF(tvals, fmt, Srefdate, tunits, dbg):
    """ Function to transform a given data in String formats to a CF date
      tvals= string temporal values
      fmt= format of the time values
      Srefdate= reference date in [YYYY][MM][DD][HH][MI][SS] format
      tunits= units to use ('weeks', 'days', 'hours', 'minutes', 'seconds') 
      dbg= debug
    >>> Stringtimes_CF(['19760217082712','20150213101837'], '%Y%m%d%H%M%S', 
      '19491201000000', 'hours', False)
    [229784.45333333  571570.31027778]
    """
    import datetime as dt

    fname = 'Stringtimes_CF'

    dimt = len(tvals)

    yrref = int(Srefdate[0:4])
    monref = int(Srefdate[4:6])
    dayref = int(Srefdate[6:8])
    horref = int(Srefdate[8:10])
    minref = int(Srefdate[10:12])
    secref = int(Srefdate[12:14])
    refdate = dt.datetime( yrref, monref, dayref, horref, minref, secref)

    cftimes = np.zeros((dimt), dtype=np.float)

    Nfmt=len(fmt.split('%'))

    if dbg: print '  ' + fname + ': fmt=',fmt,'refdate:',Srefdate,'units:',tunits,   \
      'date dt_days dt_time deltaseconds CFtime _______'
    for it in range(dimt):

# Removing excess of milliseconds (up to 6 decimals)
        if fmt.split('%')[Nfmt-1] == 'f':
            tpoints = tvals[it].split('.')
            if len(tpoints[len(tpoints)-1]) > 6:
                milisec = '{0:.6f}'.format(np.float('0.'+tpoints[len(tpoints)-1]))[0:7]
                newtval = ''
                for ipt in range(len(tpoints)-1):
                    newtval = newtval + tpoints[ipt] + '.'
                newtval = newtval + str(milisec)[2:len(milisec)+1]
            else:
                newtval = tvals[it]
            tval = dt.datetime.strptime(newtval, fmt)
        else:
            tval = dt.datetime.strptime(tvals[it], fmt)

        deltatime = tval - refdate
        deltaseconds = deltatime.days*24.*3600. + deltatime.seconds +                \
          deltatime.microseconds/1000000.
        if tunits == 'weeks':
            deltat = 7.*24.*3600.
        elif tunits == 'days':
            deltat = 24.*3600.
        elif tunits == 'hours':
            deltat = 3600.
        elif tunits == 'minutes':
            deltat = 60.
        elif tunits == 'seconds':
            deltat = 1.
        else:
            print errormsg
            print '  ' + fname + ": time units '" + tunits + "' not ready !!"
            quit(-1)

        cftimes[it] = deltaseconds / deltat
        if dbg:
            print '  ' + tvals[it], deltatime, deltaseconds, cftimes[it]

    return cftimes

def adding_complementary(onc, dscn, okind, dvalues, tvals, refCFt, CFtu, Nd, dbg):
    """ Function to add complementary variables as function of the observational type
      onc= netCDF file object to which the variables are added
      dscn= description dictionary
      okind= observational kind
      dvalues= values
      tvals= CF time values
      refCFt= reference time of CF time (in [YYYY][MM][DD][HH][MI][SS])
      CFtu= CF time units
      Nd= size of the domain
      dbg= debugging flag
    """
    import numpy.ma as ma

    fname = 'adding_complementary'
  
# Kind of observations which require the integer lon/lat (for the 2D map)
    map2D=['multi-points', 'trajectory']

    SrefCFt = refCFt[0:4] +'-'+ refCFt[4:6] +'-'+ refCFt[6:8] + ' ' + refCFt[8:10] + \
      ':'+ refCFt[10:12] +':'+ refCFt[12:14]

    if dscn['NAMElon'] == '-' or dscn['NAMElat'] == '-':
        print errormsg
        print '  ' + fname + ": to complement a '" + okind + "' observation kind " + \
          " a given longitude ('NAMElon':",dscn['NAMElon'],") and latitude ('" +     \
          "'NAMElat:'", dscn['NAMElat'],') from the data has to be provided and ' +  \
          'any are given !!'
        quit(-1)

    if not dvalues.has_key(dscn['NAMElon']) or not dvalues.has_key(dscn['NAMElat']):
        print errormsg
        print '  ' + fname + ": observations do not have 'NAMElon':",                \
          dscn['NAMElon'],"and/or 'NAMElat:'", dscn['NAMElat'],' !!'
        print '    available data:',dvalues.keys()
        quit(-1)

    if okind == 'trajectory':
        if dscn['NAMEheight'] == '-':
            print warnmsg
            print '  ' + fname + ": to complement a '" + okind + "' observation " +  \
              "kind a given height ('NAMEheight':",dscn['NAMEheight'],"') might " +  \
              'be provided and any is given !!'
            quit(-1)

        if not dvalues.has_key(dscn['NAMEheight']):
            print errormsg
            print '  ' + fname + ": observations do not have 'NAMEtime':",           \
              dscn['NAMEtime'],' !!'
            print '    available data:',dvalues.keys()
            quit(-1)

    if searchInlist(map2D, okind):
# A new 2D map with the number of observations will be added, for which 'NAMElon'
#   and 'NAMElat' are necessary. An NdxNd domain space size will be used.
        onc.createDimension('lon2D',Nd)
        onc.createDimension('lat2D',Nd)
        lonvals = ma.masked_equal(dvalues[dscn['NAMElon']], None)
        latvals = ma.masked_equal(dvalues[dscn['NAMElat']], None)

        minlon = min(lonvals)
        maxlon = max(lonvals)
        minlat = min(latvals)
        maxlat = max(latvals)

        blon = (maxlon - minlon)/(Nd-1)
        blat = (maxlat - minlat)/(Nd-1)
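# For instance (hypothetical values): with Nd=100 and longitudes spanning
#   [0., 10.] degrees, blon = 10./99 ~ 0.101 degrees per grid cell, and an
#   observation at lon=2.5 falls in cell ilon = int(99*2.5/10.) = 24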

        newvar = onc.createVariable( 'lon2D', 'f8', ('lon2D'))
        basicvardef(newvar, 'longitude', 'longitude map observations','degrees_East')
        newvar[:] = minlon + np.arange(Nd)*blon
        newattr = set_attribute(newvar, 'axis', 'X')

        newvar = onc.createVariable( 'lat2D', 'f8', ('lat2D'))
        basicvardef(newvar, 'latitude', 'latitude map observations', 'degrees_North')
        newvar[:] = minlat + np.arange(Nd)*blat
        newattr = set_attribute(newvar, 'axis', 'Y')

        if dbg: 
            print '  ' + fname + ': minlon=',minlon,'maxlon=',maxlon
            print '  ' + fname + ': minlat=',minlat,'maxlat=',maxlat
            print '  ' + fname + ': precision on x-axis=', blon*(Nd-1), 'y-axis=',   \
              blat*(Nd-1)
        
    if okind == 'multi-points':
        map2D = np.ones((Nd, Nd), dtype=np.float)*fillValueI

        dimt = len(tvals)
        Nlost = 0
        for it in range(dimt):
            lon = dvalues[dscn['NAMElon']][it]
            lat = dvalues[dscn['NAMElat']][it]
            if lon is not None and lat is not None:
                ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon))
                ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat))

                if map2D[ilat,ilon] == fillValueI: 
                    map2D[ilat,ilon] = 1
                else:
                    map2D[ilat,ilon] = map2D[ilat,ilon] + 1
                if dbg: print it, lon, lat, ilon, ilat, map2D[ilat,ilon]

        newvar = onc.createVariable( 'mapobs', 'f4', ('lat2D', 'lon2D'),             \
          fill_value = fillValueI)
        basicvardef(newvar, 'map_observations', 'number of observations', '-')
        newvar[:] = map2D
        newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D')

    elif okind == 'trajectory':
# A new 2D map with the trajectory 'NAMElon' and 'NAMElat' and maybe 'NAMEheight'
#   are necessary. A NdxNdxNd domain space size will be used. Using time as 
#   reference variable
        if dscn['NAMEheight'] == '-':
# No height
            map2D = np.ones((Nd, Nd), dtype=np.float)*fillValueI

            dimt = len(tvals)
            Nlost = 0
            if dbg: print ' time-step lon lat ix iy passes _______'
            for it in range(dimt):
                lon = dvalues[dscn['NAMElon']][it]
                lat = dvalues[dscn['NAMElat']][it]
                if lon is  not None and lat is not None:
                    ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon))
                    ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat))

                    if map2D[ilat,ilon] == fillValueI: 
                        map2D[ilat,ilon] = 1
                    else:
                        map2D[ilat,ilon] = map2D[ilat,ilon] + 1
                    if dbg: print it, lon, lat, ilon, ilat, map2D[ilat,ilon]

            newvar = onc.createVariable( 'trjobs', 'i', ('lat2D', 'lon2D'),          \
              fill_value = fillValueI)
            basicvardef(newvar, 'trajectory_observations', 'number of passes', '-' )
            newvar[:] = map2D
            newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D')

        else:
            ivn = 0
            for vn in dscn['varN']:
                if vn == dscn['NAMEheight']: 
                    zu = dscn['varU'][ivn]
                    break
                ivn = ivn + 1
                    
            onc.createDimension('z2D',Nd)
            zvals = ma.masked_equal(dvalues[dscn['NAMEheight']], None)
            minz = min(zvals)
            maxz = max(zvals)

            bz = (maxz - minz)/(Nd-1)

            newvar = onc.createVariable( 'z2D', 'f8', ('z2D'))
            basicvardef(newvar, 'z2D', 'z-coordinate map observations', zu)
            newvar[:] = minz + np.arange(Nd)*bz
            newattr = set_attribute(newvar, 'axis', 'Z')

            if dbg:
                print '  ' + fname + ': zmin=',minz,zu,'zmax=',maxz,zu
                print '  ' + fname + ': precision on z-axis=', bz*(Nd-1), zu

            map3D = np.ones((Nd, Nd, Nd), dtype=int)*fillValueI
            dimt = len(tvals)
            Nlost = 0
            if dbg: print ' time-step lon lat z ix iy iz passes _______'
            for it in range(dimt):
                lon = dvalues[dscn['NAMElon']][it]
                lat = dvalues[dscn['NAMElat']][it]
                z = dvalues[dscn['NAMEheight']][it]
                if lon is  not None and lat is not None and z is not None:
                    ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon))
                    ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat))
                    iz = int((Nd-1)*(z - minz)/(maxz - minz))

                    if map3D[iz,ilat,ilon] == fillValueI: 
                        map3D[iz,ilat,ilon] = 1
                    else:
                        map3D[iz,ilat,ilon] = map3D[iz,ilat,ilon] + 1
                    if dbg: print it, lon, lat, z, ilon, ilat, iz, map3D[iz,ilat,ilon]

            newvar = onc.createVariable( 'trjobs', 'i', ('z2D', 'lat2D', 'lon2D'),   \
              fill_value = fillValueI)
            basicvardef(newvar, 'trajectory_observations', 'number of passes', '-')
            newvar[:] = map3D
            newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D z2D')

    onc.sync()
    return

def adding_station_desc(onc,stdesc):
    """ Function to add a station description in a netCDF file
      onc= netCDF object
      stdesc= station description lon, lat, height
    """
    fname = 'adding_station_desc'

    newdim = onc.createDimension('nst',1)

    if onc.variables.has_key('lon'):
        print warnmsg
        print '  ' + fname + ": variable 'lon' already exist !!"
        print "    renaming it as 'lonst'"
        lonname = 'lonst'
    else:
        lonname = 'lon'

    newvar = onc.createVariable( lonname, 'f4', ('nst'))
    basicvardef(newvar, lonname, 'longitude', 'degrees_East' )
    newvar[:] = stdesc[0]

    if onc.variables.has_key('lat'):
        print warnmsg
        print '  ' + fname + ": variable 'lat' already exist !!"
        print "    renaming it as 'latst'"
        latname = 'latst'
    else:
        latname = 'lat'

    newvar = onc.createVariable( latname, 'f4', ('nst'))
    basicvardef(newvar, latname, 'latitude', 'degrees_North' )
    newvar[:] = stdesc[1]

    if onc.variables.has_key('height'):
        print warnmsg
        print '  ' + fname + ": variable 'height' already exist !!"
        print "    renaming it as 'heightst'"
        heightname = 'heightst'
    else:
        heightname = 'height'

    newvar = onc.createVariable( heightname, 'f4', ('nst'))
    basicvardef(newvar, heightname, 'height above sea level', 'm' )
    newvar[:] = stdesc[2]

    return

####### ###### ##### #### ### ## #

strCFt="Refdate,tunits (CF reference date [YYYY][MM][DD][HH][MI][SS] format and " +  \
  " and time units: 'weeks', 'days', 'hours', 'miuntes', 'seconds')"

kindobs=['multi-points', 'single-station', 'trajectory']
strkObs="kind of observations; 'multi-points': multiple individual punctual obs " +  \
  "(e.g., lightning strikes), 'single-station': single station on a fixed position,"+\
  "'trajectory': following a trajectory"

parser = OptionParser()
parser.add_option("-c", "--comments", dest="charcom",
  help="':', list of characters used for comments", metavar="VALUES")
parser.add_option("-d", "--descriptionfile", dest="fdesc",
  help="description file to use", metavar="FILE")
parser.add_option("-e", "--end_column", dest="endcol",
  help="character to indicate end of the column ('space', for ' ')", metavar="VALUE")
parser.add_option("-f", "--file", dest="obsfile",
  help="observational file to use", metavar="FILE")
parser.add_option("-g", "--debug", dest="debug",
  help="whther debug is required ('false', 'true')", metavar="VALUE")
parser.add_option("-k", "--kindObs", dest="obskind", type='choice', choices=kindobs, 
  help=strkObs, metavar="FILE")
parser.add_option("-s", "--stationLocation", dest="stloc",  
  help="longitude, latitude and height of the station (only for 'single-station')", 
  metavar="FILE")
parser.add_option("-t", "--CFtime", dest="CFtime", help=strCFt, metavar="VALUE")

(opts, args) = parser.parse_args()

#######    #######
## MAIN
    #######

ofile='OBSnetcdf.nc'

if opts.charcom is None:
    print warnmsg
    print '  ' + main + ': No list of comment characters provided!!'
    print '    assuming no need!'
    charcomments = []
else:
    charcomments = opts.charcom.split(':')    

if opts.fdesc is None:
    print errormsg
    print '  ' + main + ': No description file for the observational data provided!!'
    quit(-1)

if opts.endcol is None:
    print warnmsg
    print '  ' + main + ': No character for the end of the column provided!!'
    print "    assuming 'space'"
    endcol = ' '
else:
    if opts.endcol == 'space':
        endcol = ' '
    else:
        endcol = opts.endcol

if opts.obsfile is None:
    print errormsg
    print '  ' + main + ': No observations file provided!!'
    quit(-1)

if opts.debug is None:
    print warnmsg
    print '  ' + main + ': No debug provided!!'
    print "    assuming 'False'"
    debug = False
else:
    if opts.debug == 'true':
        debug = True
    else:
        debug = False

if not os.path.isfile(opts.fdesc):
    print errormsg
    print '   ' + main + ": description file '" + opts.fdesc + "' does not exist !!"
    quit(-1)

if not os.path.isfile(opts.obsfile):
    print errormsg
    print '   ' + main + ": observational file '" + opts.obsfile + "' does not exist !!"
    quit(-1)

if opts.CFtime is None:
    print warnmsg
    print '  ' + main + ': No CFtime criteria are provided !!'
    print "    either time is already in CF-format ('timeFMT=CFtime') in '" +        \
      opts.fdesc + "'"
    print "    or assuming refdate: '19491201000000' and time units: 'hours'"
    referencedate = '19491201000000'
    timeunits = 'hours'
else:
    referencedate = opts.CFtime.split(',')[0]
    timeunits = opts.CFtime.split(',')[1]

if opts.obskind is None:
    print warnmsg
    print '  ' + main + ': No kind of observations provided !!'
    print "    assuming 'single-station': single station on a fixed position at 0,0,0"
    obskind = 'single-station'
    stationdesc = [0.0, 0.0, 0.0]
else:
    obskind = opts.obskind
    if obskind == 'single-station':
        if opts.stloc is None:
            print errormsg
            print '  ' + main + ': No station location provided !!'
            quit(-1)
        else:
            stationdesc = [np.float(opts.stloc.split(',')[0]),                       \
              np.float(opts.stloc.split(',')[1]), np.float(opts.stloc.split(',')[2])]
    else:
        obskind = opts.obskind

# Reading description file
##
description = read_description(opts.fdesc, debug)

Nvariables=len(description['varN'])
formats = description['varTYPE']

if len(formats) != Nvariables: 
    print errormsg
    print '  ' + main + ': number of formats:',len(formats),' and number of ' +     \
      'variables', Nvariables,' do not coincide!!'
    print '    * what is found is _______'
    if Nvariables > len(formats):
        Nshow = len(formats)
        for ivar in range(Nshow):
            print '      ',description['varN'][ivar],':', description['varLN'][ivar],\
               '[', description['varU'][ivar], '] fmt:', formats[ivar]
        print '      missing values for:', description['varN'][Nshow:Nvariables+1]
    else:
        Nshow = Nvariables
        for ivar in range(Nshow):
            print '      ',description['varN'][ivar],':', description['varLN'][ivar],\
               '[', description['varU'][ivar], '] fmt:', formats[ivar]
        print '      excess of formats for:', formats[Nshow:len(formats)+1]

    quit(-1)

# Reading values
##
datavalues = read_datavalues(opts.obsfile, charcomments, endcol, formats, 
  description['MissingValue'], description['varN'], debug)

# Total number of values
Ntvalues = len(datavalues[description['varN'][0]])
print main + ': total temporal values found:',Ntvalues

objfile = NetCDFFile(ofile, 'w')
  
# Creation of dimensions
##
objfile.createDimension('time',None)
objfile.createDimension('StrLength',StringLength)

# Creation of variables
##
for ivar in range(Nvariables):
    varn = description['varN'][ivar]
    print "  including: '" + varn + "' ... .. ."

    if formats[ivar] == 'D':
        newvar = objfile.createVariable(varn, 'f8', ('time'), fill_value=fillValueF)
        basicvardef(newvar, varn, description['varLN'][ivar],                        \
          description['varU'][ivar])
# Missing values (None) are replaced by the float filling value
        newvar[:] = [fillValueF if v is None else v for v in datavalues[varn]]
    elif formats[ivar] == 'F':
        newvar = objfile.createVariable(varn, 'f', ('time'), fill_value=fillValueF)
        basicvardef(newvar, varn, description['varLN'][ivar],                        \
          description['varU'][ivar])
        newvar[:] = [fillValueF if v is None else v for v in datavalues[varn]]
    elif formats[ivar] == 'I':
        newvar = objfile.createVariable(varn, 'i', ('time'), fill_value=fillValueI)
        basicvardef(newvar, varn, description['varLN'][ivar],                        \
          description['varU'][ivar])
# Missing integer values (None) are replaced explicitly by the integer filling value
        vals = np.array(datavalues[varn])
        for iv in range(Ntvalues):
            if vals[iv] is None: vals[iv] = fillValueI
        newvar[:] = vals
    elif formats[ivar] == 'S':
        newvar = objfile.createVariable(varn, 'c', ('time','StrLength'))
        basicvardef(newvar, varn, description['varLN'][ivar],                        \
          description['varU'][ivar])
        writing_str_nc(newvar, datavalues[varn], StringLength)

# Extra variable descriptions/attributes
    if description.has_key('varBUFR'): 
        set_attribute(newvar,'bufr_code',description['varBUFR'][ivar])

    objfile.sync()

# Time variable in CF format
##
if description['FMTtime'] == 'CFtime':
    timevals = datavalues[description['NAMEtime']]
    iv = 0
    for ivn in description['varN']:
        if ivn == description['NAMEtime']:
            tunits = description['varU'][iv]
            break
        iv = iv + 1
else:
    # Time as a composition of different columns
    tcomposite = description['NAMEtime'].find('@')
    if tcomposite != -1:
        timevars = description['NAMEtime'].split('@')

        print warnmsg
        print '  ' + main + ': time values as combination of different columns!'
        print '    combining:',timevars,' with a final format: ',description['FMTtime']

        timeSvals = []
        if debug: print '  ' + main + ': creating times _______'
        for it in range(Ntvalues):
            tSvals = ''
            for tvar in timevars:
                tSvals = tSvals + datavalues[tvar][it] + ' '

            timeSvals.append(tSvals[0:len(tSvals)-1])
            if debug: print it, '*' + timeSvals[it] + '*'

        timevals = Stringtimes_CF(timeSvals, description['FMTtime'], referencedate,      \
          timeunits, debug)
    else:
        timevals = Stringtimes_CF(datavalues[description['NAMEtime']],                   \
          description['FMTtime'], referencedate, timeunits, debug)

    CFtimeRef = referencedate[0:4] +'-'+ referencedate[4:6] +'-'+ referencedate[6:8] +   \
      ' ' + referencedate[8:10] +':'+ referencedate[10:12] +':'+ referencedate[12:14]
    tunits = timeunits + ' since ' + CFtimeRef
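# e.g., with the default reference date '19491201000000' and units 'hours' this
#   yields tunits = 'hours since 1949-12-01 00:00:00'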

if objfile.variables.has_key('time'):
    print warnmsg
    print '  ' + main + ": variable 'time' already exist !!"
    print "    renaming it as 'CFtime'"
    timeCFname = 'CFtime'
    newdim = objfile.renameDimension('time','CFtime')
    newvar = objfile.createVariable( timeCFname, 'f8', ('CFtime'))
    basicvardef(newvar, timeCFname, 'time', tunits )
else:
    timeCFname = 'time'
    newvar = objfile.createVariable( timeCFname, 'f8', ('time'))
    basicvardef(newvar, timeCFname, 'time', tunits )

set_attribute(newvar, 'calendar', 'standard')
newvar[:] = timevals

# Global attributes
##
for descn in description.keys():
    if descn[0:3] != 'var' and descn[0:4] != 'NAME' and descn[0:3] != 'FMT':
        set_attribute(objfile, descn, description[descn])

set_attribute(objfile,'author_nc','Lluis Fita')
set_attribute(objfile,'institution_nc','Laboratoire de Meteorology Dynamique, ' +    \
  'LMD-Jussieu, UPMC, Paris')
set_attribute(objfile,'country_nc','France')
set_attribute(objfile,'script_nc','create_OBSnetcdf.py')
set_attribute(objfile,'version_script',version)
set_attribute(objfile,'information',                                                 \
  'http://www.lmd.jussieu.fr/~lflmd/ASCIIobs_nc/index.html')

objfile.sync()

# Adding new variables as function of the observational type
## 'multi-points', 'single-station', 'trajectory'

if obskind != 'single-station':
    adding_complementary(objfile, description, obskind, datavalues, timevals,        \
      referencedate, timeunits, Ndim2D, debug)
else:
# Adding three variables with the station location, longitude, latitude and height
    adding_station_desc(objfile,stationdesc)

objfile.sync()
objfile.close()

print main + ": Successfull generation of netcdf observational file '" + ofile + "' !!"
