# Python script to generate the files and the plots for a sensitivity study with multiple model configurations and models
#   L. Fita
#   LMD, Jussieu/Palaiseau, France
#  Script configuration is read from the ASCII file 'model_graphics.dat'
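#  An illustrative (purely hypothetical) 'model_graphics.dat' could contain entries such
#    as the following; the exact syntax is whatever gen.get_configuration() expects:
#      models = WRF:LMDZ
#      WRFexps = control:highres
#      WRFheaders = wrfout_d01
#      ifold = /data/experiments
#      ofold = /data/analysis
#      pyHOME = /home/user/PY
#      CFreftime = 19491201000000
#      CFunitstime = hours
#      plevels = 100000:85000:70000:50000:20000
#      scratch = false
#      filescratch = false
#      figscratch = false
#      addfiles = false
#      addfigures = false
#      debug = true
#      VAR_pinterp+tmean+xmean = ta:hus
#      VAR_tmean = tas:pr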

import numpy as np
import nc_var_tools as ncvar
import generic_tools as gen
import time as tim
#  To avoid errors like: '/bin/sh: 1: Syntax error: Bad fd number'
#    Make sure that /bin/sh directs to bash and not to dash: 
#  http://stackoverflow.com/questions/15809060/sh-syntax-error-bad-fd-number
import subprocess as sub
import os

main = 'model_graphics.py'

errmsg = ncvar.errormsg
warnmsg = ncvar.warnmsg

def scratches(config):
    """ Function to set-up if it is needed to start from the scratch
      config: dictionary with the configuration
    """
    fname = 'scratches'

    if config['scratch'] == 'true':
        scr = True
        print warnmsg
        print "  " + main + ": starting from the SCRATCH !!"
        print "    10 seconds left!!"
        filescr = True
        figscr = True
        tim.sleep(10)
    else:
        scr = False
        if config['filescratch'] == 'true':
            filescr = True
            print warnmsg
            print "  " + main + ": files starting from the SCRATCH !!"
            print "    5 seconds left!!"
            tim.sleep(5)
        else:
            filescr = False

        if config['figscratch'] == 'true':
            figscr = True
            print warnmsg
            print "  " + main + ": figures starting from the SCRATCH !!"
            print "    5 seconds left!!"
            tim.sleep(5)
        else:
            figscr = False

    if config['addfiles'] == 'true':
        addfils = True
    else:
        addfils = False

    if config['addfigures'] == 'true':
        addfigs = True
    else:
        addfigs = False

    if config['debug'] == 'true':
        debug = True
    else:
        debug = False

    return scr, filescr, figscr, addfils, addfigs, debug

def exp_headers(mod,config):
    """ Function to provide the headers and the experiments of a given model
      mod= model name
      config= configuration of the experiment
    """
    fname = 'exp_headers'

    # No CASE in python!
    #  case ${mod} in ...
    if mod == 'WRF':
        expers = config['WRFexps'].split(':')
        fhs = config['WRFheaders'].split(':')
    elif mod == 'LMDZ':
        expers = config['LMDZexps'].split(':')
        fhs = config['LMDZheaders'].split(':')
    elif mod == 'WRF_LMDZ':
        expers = config['WRF_LMDZexps'].split(':')
        fhs = config['WRF_LMDZheaders'].split(':')
    else:
        print errmsg
        print "  " + fname + ": model '" + mod + "' not ready!!"
        quit(-1)

    return expers, fhs
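# Illustrative example for 'exp_headers' (hypothetical values): with
#   WRFexps = control:highres and WRFheaders = wrfout_d01 in the configuration,
#   exp_headers('WRF', config) returns (['control', 'highres'], ['wrfout_d01'])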

class VariableInf(object):
    """ Class which holds all information of a given variable
      name= CF name of the variable
      fheader= header of the file from which it can be computed
      model= model variable from which it can be directly computed
      diag= diagnostics from which it can be computed
    """
    def __init__( self, name, fheader, model, diag):
       self.name= None
       self.fheader= None
       self.model= None
       self.diag= None
       if name is not None:
           self.name = name
           self.fheader= fheader
           self.model= model
           self.diag= diag
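# For illustration only (hypothetical values): a VariableInf for a variable retrievable
#   directly from model output could look like VariableInf('tas', 'wrfout_d01', 'T2', None),
#   whereas a diagnosed one would carry None as 'model' and the diagnostic recipe in 'diag'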

def variable_compute(idir,var,ftests,db):
    """ Function to retrieve the computation way of a given variable using a series of test files
      idir= directory with the test files
      var= name of the variable to test
      ftests= dictionary with the test files for each header
      db= debug flag
    """
    fname='variable_compute'

    cancompute = True
    for headerf in ftests.keys():
        filen=ftests[headerf]
        vmod, vdiag = ncvar.computevar_model(var, idir + '/' + filen)

        if vmod is None and vdiag is None:
            cancompute = False
        else:
            cancompute = True
            # Should be considered that variable can also be computed by both ways?
            break

    if not cancompute:
        print warnmsg
        print '  ' + fname + ": there is no way to compute '" + var +                \
          "' for model '" + mod + "' !!"
# Too strict!
#        quit(-1)

    # Getting only the first possibility of model and diagnostic
    if vmod is not None:
        modV = vmod[0]
    else:
        modV = None
    if vdiag is not None:
        diagV = vdiag[0]
    else:
        diagV = None

    varcomp = VariableInf(var, headerf, modV, diagV)

    # A ',' separated list 'varcompute' is created for each variable giving:
    #   [var]|[vark]|[headerf]|[varmod]|[vardiag]
    # This list will be used to compute a new file for each variable

    if db:
        print 'Variable information _______'
        gen.printing_class(varcomp)

    return varcomp

def get_operations_var(vc,db):
    """ Function to provide the operations to make for each variable from 'VAR_' dictionary
      vc= 'VAR_' dictionary, with all the parameters starting with 'VAR_'
      db= debug flag
    """
    fname = 'get_operations_var'

    LH = len('VAR_')
    iop = 1
    # list of operations
    doopers = {}
    # operations by variable
    operationsvar = {}
    # individual operations by variable (not repeated)
    indivoperationsvar = {}

    # Variables with a calculation which requires vertical interpolation of the whole
    #   file (operations starting with 'VAR_pinterp')
    LVp = len('VAR_pinterp')
    varglobalp = []

    for oper in vc.keys():
        Loper = len(oper)
        opn = oper[LH:Loper+1]
        vns = vc[oper].split(':')
        ops1 = opn.split('+')
        doopers[opn] = ops1
        for vn in vns:
            if not operationsvar.has_key(vn):
                operationsvar[vn] = [opn]
                indivoperationsvar[vn] = ops1
            else:
                opers = operationsvar[vn]
                opers.append(opn)
                operationsvar[vn] = opers
                opers1 = indivoperationsvar[vn]
                for op1 in ops1:
                    if not gen.searchInlist(opers1, op1):
                        opers1.append(op1)
                indivoperationsvar[vn] = opers1

        if oper[0:LVp] == 'VAR_pinterp':
            for vn in vns:
                if not gen.searchInlist(varglobalp,vn): varglobalp.append(vn)

        iop = iop + 1

    if db:
        print '  operations to make _______'
        gen.printing_dictionary(doopers)
        print '  operations by variable _______'
        gen.printing_dictionary(operationsvar)
        print '  individual operations by variable _______'
        gen.printing_dictionary(indivoperationsvar)
        print '  variables with the vertical interpolation for all the file _______'
        print '    #', varglobalp

    return doopers, operationsvar, indivoperationsvar, varglobalp
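# Illustrative example for 'get_operations_var' (hypothetical configuration): with
#   vc = {'VAR_pinterp+tmean': 'ta:hus', 'VAR_tmean': 'tas'} it would return
#     doopers= {'pinterp+tmean': ['pinterp', 'tmean'], 'tmean': ['tmean']}
#     operationsvar= {'ta': ['pinterp+tmean'], 'hus': ['pinterp+tmean'], 'tas': ['tmean']}
#     indivoperationsvar= {'ta': ['pinterp', 'tmean'], 'hus': ['pinterp', 'tmean'], 'tas': ['tmean']}
#     varglobalp= ['ta', 'hus']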

def pinterp_var(oper):
    """ Function to retrieve characteristics of the vertical interpolation for the operation
      oper= operation
        Whether vertical interpolation is:
          # 'global': for all the file
          # 'local': at a given step of the process
          # 'none': no vertical interpolation
    >>> pinterp_var('last+pinterp+xmean')
    'local'
    >>> pinterp_var('pinterp+tmean+xmean')
    'global'
    >>> pinterp_var('xmean')
    'none'
    """
    fname = 'pinterp_var'

    pinn = 'pinterp'

    Lpinterp = len(pinn)
    if oper[0:Lpinterp] == pinn: 
        pkind = 'global'
        return pkind

    if oper.find(pinn) != -1:
        pkind = 'local'
    else:
        pkind = 'none'
        
    return pkind

def compvars_listconstruct(config, minf, Files, TestFiles, idir, odir, debug):
    """ Function to construct the list of variables to compute
      config= dictionary with the configuration of the execution
         variables to compute are obtained from all values starting with 'VAR_' in 'model_graphics.dat', and then
           providing a consecutive number of calculations separated by '+'
             VAR_[calc1]+[calc2] = tas:wss
           will compute first [calc1] and then [calc2] for 'tas' and 'wss'
      minf= class with information about the model
      Files= dictionary of files for each header
      TestFiles= dictionary of files to test how to compute the variable
      idir= input directory
      odir= output directory
      debug= debug flag
    """
    fname='compvars_listconstruct'

    varcomp = gen.get_specdictionary_HMT(config,H='VAR_')

    if debug:
        print '  variables to compute ________'
        gen.printing_dictionary(varcomp)

    # Getting operations by variable
    opers, varoper, indivaroper, varglobp = get_operations_var(varcomp,debug)    

    strfiles = gen.dictKeysVals_stringList(Files)
    strtestfiles = gen.dictKeysVals_stringList(TestFiles)

    if debug: print '  ' + fname + ': strtestfiles:', strtestfiles

    Svarcompute = ''

    # Main dictionary with all the variables and their calculation
    allvarcomp = {}

    ivop = 0
    for vn in varoper:
        vcomp = variable_compute(idir,vn,TestFiles,False)
        for op in varoper[vn]:
            # Creation of a String as: [CFname]|[operation]|[fheader]|[vmodel]|[vdiag]|[globalP]
            #   [CFname]: CF-name of the variable (must appear in 'variables_values.dat')
            #   [operation]: operation to compute
            #   [fheader]: header of file with the required variables
            #   [vmodel]: model variable from which it can be directly computed ('None' if not)
            #   [vdiag]: diagnostic from which it can be computed ('None' if not)
            #   [globalP]: Whether vertical interpolation is:
            #     'global': for all file
            #     'local': at a given step of the process
            #     'none': no vertical interpolation
            globalP = pinterp_var(op)
            if vcomp.model is not None:
                Smodel = ':'.join(vcomp.model)
            else:
                Smodel = 'None'
            if vcomp.diag is not None:
                Sdiag = ':'.join(vcomp.diag)
            else:
                Sdiag = 'None'

            Svc = vcomp.name + '|' + op + '|' + vcomp.fheader + '|' + Smodel + '|' + \
              Sdiag + '|' + globalP
            if ivop == 0:
                Svarcompute = Svc 
            else:
                Svarcompute = Svarcompute + ',' + Svc 

            allvarcomp[vn + '_' + op] = [vcomp.fheader, vcomp.model, vcomp.diag, globalP]
            ivop = ivop + 1

    if debug:
        print '  Variables to compute _______'
        gen.printing_dictionary(allvarcomp)

    # Writing out the 'varcompute' information to avoid recomputing it next time (unless filescratch!)
    objf = open(odir + '/varcompute.inf', 'w')
    objf.write('files: ' + strfiles + '\n')
    objf.write('testfiles: ' + strtestfiles + '\n')
    objf.write('varcompute: ' + Svarcompute + '\n')
    objf.write('itotv: ' + str(ivop) + '\n')
    objf.close()
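    # Illustrative (hypothetical) content of 'varcompute.inf'; the 'files:' and 'testfiles:'
    #   lines follow whatever encoding gen.dictKeysVals_stringList produces:
    #     files: [string-encoded dictionary of files per header]
    #     testfiles: [string-encoded dictionary with one test file per header]
    #     varcompute: tas|tmean|wrfout_d01|T2|None|none,ta|pinterp+xmean|wrfout_d01|None|[diag]|global
    #     itotv: 2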

    return allvarcomp, ivop

def read_varcomp_file(filen):
    """ Function to read 'varcompute.inf' and reconstruct the dictionaries
    """
    fname = 'read_varcomp_file'

    allvarcomp = {}
    
    objf = open(filen, 'r')

    for line in objf:
        # lines are written as '[key]: [value]'
        vals = line.replace('\n', '').split(': ', 1)
        if vals[0] == 'files':
            Files = gen.stringList_dictKeysVals(vals[1])
        elif vals[0] == 'testfiles':
            TestFiles = gen.stringList_dictKeysVals(vals[1])
        elif vals[0] == 'varcompute':
            for Svc in vals[1].split(','):
                Vvals = Svc.split('|')
                if Vvals[3] == 'None':
                    mod = None
                else:
                    mod = Vvals[3].split(':')
                if Vvals[4] == 'None':
                    diag = None
                else:
                    diag = Vvals[4].split(':')

                allvarcomp[Vvals[0]+'_'+Vvals[1]] = [Vvals[2], mod, diag, Vvals[5]]
        elif vals[0] == 'itotv':
            ivop = int(vals[1])

    objf.close()

    return Files, TestFiles, allvarcomp, ivop

def compute_variable(minf, idir, usefiles, odir, cvar, gP, scr, pyH, Tref, Tunits, db):
    """ Function to compute a variable
      minf= class with the information of the model
      idir= directory with the input files
      usefiles= dictionary of files as dict([headerf]) = [file1], ..., [fileN]
      odir= directory to write the output files
      cvar= class with the information of the variable: 'name', 'fheader', 'model', 'diag'
      gP= kind of vertical interpolation ('global', 'local', 'none')
      scr= should it be done from the scratch?
      pyH= location of the python HOME
      Tref= CF time reference
      Tunits= CF time units
      db= debug flag
    """
    fname='compute_variable'

    CFvarn=cvar.name
    headerf = cvar.fheader 
    modvar = cvar.model
    diagvar = cvar.diag

    cfiles = usefiles[headerf]

    # dimensions
    dnx = minf.dimxn
    dny = minf.dimyn
    # var-dimensions
    vdnx = minf.vardxn
    vdny = minf.vardyn

    # Computing each file separately and then joining them all
    Ntotfiles = len(cfiles)
    Nzeros = len(str(Ntotfiles))
    NStot = str(Ntotfiles).zfill(Nzeros)

    # For variables which require vertical interpolation, a 'p' suffix is added to the
    #   file header
    if gP != 'none':
        SgP = 'p'
    else:
        SgP = ''
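    # Hypothetical example of the naming scheme used below: for CFvarn='ta',
    #   headerf='wrfout_d01', gP='global' and 3 input files, the per-file pieces would be
    #   'ta_wrfout_d01p_1-3.nc' ... 'ta_wrfout_d01p_3-3.nc', which are finally
    #   concatenated into 'ta_wrfout_d01p.nc'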

    # Getting in working dir
    os.chdir(odir)

    # File to keep track of all operations
    otrackf = open( odir + '/all_computevars.inf', 'a')

    # Computing variable
    ifile=1
    for cf in cfiles:
        ifS = str(ifile).zfill(Nzeros)
        ifilen = odir + '/' + CFvarn + '_' + headerf + SgP + '_' + ifS + '-' +       \
          NStot + '.nc'
        fileon = odir + '/' + CFvarn + '_' + headerf + SgP + '.nc'

        if db:
            print '  ' + fname + ": creation of variable file '" + ifilen + "' ..."

        if scr:
            sout = sub.call('rm ' + ifilen + ' >& /dev/null', shell=True)
            sout = sub.call('rm ' + fileon + ' >& /dev/null', shell=True)

        if not os.path.isfile(ifilen) and not os.path.isfile(fileon):
# Since model direct values are retrieved from 'variables_values.dat', which was 
#   initially conceived as a way to only give variable attributes, ranges and color 
#   bars, if a variable has a diagnostic way to be computed, the latter will be 
#   preferred

            if modvar is not None and diagvar is None:
                # model variable
                values = modvar + ',0,-1,-1'
                vs = modvar + ',' + vdnx + ',' + vdny + ',' + vdnz + ',' + vdnt
                with gen.Capturing() as output:
                    ncvar.DataSetSection_multivars(values, cf, vs)
                
                newfile, loc = gen.search_sec_list(output,'succesfull')
                if db: print '  ' + fname + ': new file:', newfile
                ofile = newfile[0].split(' ')[7]
                sub.call('mv ' + ofile + ' ' + ifilen, shell=True)

                # Keeping track of the operations
                pyins = pyH + "/nc_var.py -f "+cf+" -o DataSetSection_multivars -v "+\
                  vs + " -S '" + values + "'"
                otrackf.write('\n')
                otrackf.write('# ' + CFvarn + " " + modvar + '\n')
                otrackf.write(pyins + '\n')

                # CF renaming of variable
                ncvar.chvarname(CFvarn,ifilen,modvar)
            else:
                # diagnostic variable
                dims = dnt+'@'+vdnt+','+dnz+'@'+vdnz+','+dny+'@'+vdny+','+dnx+'@'+vdnx
                diagn = diagvar[0]
                Ndiagvars = len(diagvar)
                diagc = '@'.join(diagvar[1:])
            
                values = '-f ' + cf + ' -d ' + dims + " -v '" + diagn + '|' +diagc+"'"
                sout = sub.call('python ' + pyH + '/diagnostics.py ' + values,       \
                  shell=True)
                sout = sub.call(' mv diagnostics.nc ' + ifilen, shell=True)

                # Keeping track of the operations
                pyins = 'python ' + pyH + '/diagnostics.py ' + values
                otrackf.write('\n')
                otrackf.write('# ' + CFvarn + " " + diagn + '\n')
                otrackf.write(pyins + '\n')

        # adding CF lon,lat,time in WRF files
        if minf.name == 'WRF':
            values = vdnx + ':' + vdny + ':'+ Tref + ':' + Tunits
            ncvar.WRF_toCF(values, ifilen) 

        # Attaching necessary variables for the pressure interpolation
        if gP != 'none':
            requiredinterpvars = ['P', 'PB', 'PSFC', 'PH', 'PHB', 'HGT', 'T','QVAPOR']
            print "    " + ": adding variables:", requiredinterpvars, ' to allow ' + \
              'pressure interpolation'
            for rqv in requiredinterpvars:
                ncvar.fvaradd(cf+','+rqv,ifilen)

        ifile = ifile + 1

    otrackf.close()

    # Joining variable files
    if not os.path.isfile(fileon):
        if db:
            print '  ' + fname + ": concatenating all variable files to create '" +  \
              fileon + "' ..."
        ncvar.netcdf_fold_concatenation_HMT('./,time', CFvarn+'_'+headerf+SgP+       \
          '_,-,.nc', 'all')
        sout = sub.call('mv netcdf_fold_concatenated_HMT.nc ' + fileon, shell=True)
        if os.path.isfile(fileon):
            sout = sub.call('rm ' + CFvarn + '_' + headerf + SgP + '_*-*.nc', shell=True)

    return

def compute_statistics(minf, idir, usefiles, odir, cvar, plevels, gP, Opers, scr,    \
  pyH, Tref, Tunits, db):
    """ Function to compute different statistics; it will re-use previous steps if
        they are available
      minf= class with the information of the model
      idir= directory with the input files
      usefiles= dictionary of files as dict([headerf]) = [file1], ..., [fileN]
      odir= directory to write the output files
      cvar= class with the information of the variable: 'name', 'fheader', 'model', 'diag'
      plevels= ':' separated list of pressures (in Pa) to use for the vertical interpolation
      gP= kind of vertical interpolation ('global', 'local', 'none')
      Opers= operations to compute, given as a possibly multiple consecutive combination of operations separated by '+':
        [calc1]+[calc2] will compute first [calc1] and then [calc2] 
        acc: temporal accumulated values
        diff: differences between models
        direct: no statistics
        last: last temporal value
        Lmean: latitudinal mean values
        Lsec: latitudinal section (latitudinal value must be given, [var]@[lat]) 
        lmean: longitudinal mean values
        lsec: longitudinal section (longitudinal value must be given, [var]@[lon])
        pinterp: pressure interpolation (to the given $plevels)
        tmean: temporal mean values
        turb: Taylor's turbulence decomposition value
        xmean: x-axis mean values
        ymean: y-axis mean values
        zsum: vertical aggregated values
      scr= should it be done from the scratch?
      pyH= location of the python HOME
      Tref= CF time reference
      Tunits= CF time units
      db= debug flag
    """
    fname='compute_statistics'

    # Getting a previous file name to continue with (for combinations)
    CFvarn=cvar.name
    headerf = cvar.fheader 
    modvar = cvar.model
    diagvar = cvar.diag

    cfiles = usefiles[headerf]

    # dimensions
    dnx = minf.dimxn
    dny = minf.dimyn
    # var-dimensions
    vdnx = minf.vardxn
    vdny = minf.vardyn

    # For variables which require vertical interpolation, a 'p' suffix is added to the
    #   file header
    if gP != 'none':
        SgP = 'p'
        if gP == 'local':
            CFvarnp = CFvarn + ',P,PB,PSFC,PH,PHB,HGT,T,QVAPOR,XLONG,XLAT,Times'
        else:
            CFvarnp = CFvarn
    else:
        SgP = ''
        CFvarnp = CFvarn

    # Getting in working dir
    os.chdir(odir)

    # File to keep track of all operations
    otrackf = open( odir + '/all_statsvars.inf', 'a')

    # Input file
    ifilen = odir + '/' + CFvarn + '_' + headerf + SgP + '.nc'

    # Computing variable statistics
    istats=0

    # List of not computed statistics
    wrongstats = []

    # Variables to be kept in the final file
    varkeep = []

    # Mandatory CF variables to be in all files
    varnCFs = ['lon', 'lat', 'pres', 'time']

    opers = Opers.split('+')
    Fopers = ''
    if db: print '    computing statistics of variable:', CFvarn, ' ...'
    for op in opers:
        if op == opers[0]:
            Fopers = op
            prevfile = ifilen
        else:
            Fopers = Fopers + '_' + op
            prevfile = fileon
        fileon = odir + '/' + CFvarn + '_' + headerf + SgP + '_' + Fopers + '.nc'

        if scr:
            sout = sub.call('rm ' + fileon + ' >& /dev/null', shell=True)

        hprevfile = prevfile[0:len(prevfile)-3]
        if not os.path.isfile(fileon):
            if db: print '      ', op, "using '" + prevfile + "' "
        
            if op == 'acc':
                # temporal accumulated values
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            elif op == 'direct':
                # no statistics
                sout = sub.call('mv ' + prevfile + ' ' + fileon, shell=True)
            elif op == 'last':
                # last temporal value
                vals='time,-9,0,0'
                with gen.Capturing() as output:
                    ncvar.DataSetSection(vals,prevfile)
                
                sout = sub.call('mv ' + hprevfile + '_time_B-9-E0-I0.nc ' + fileon, \
                  shell=True)

                # Keeping the operations
                pyins=pyH + "/nc_var.py -o DataSetSection -S '" + vals + "' -f " +  \
                  prevfile
                otrackf.write("\n")
                otrackf.write("# " + CFvarn + " " + Fopers + "\n")
                otrackf.write(pyins + "\n")
            elif op =='Lmean':
                # latitudinal mean values
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            elif op =='Lsec':
                # latitudinal section (latitudinal value must be given, [var]@[lat]) 
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            elif op =='lmean':
                # longitudinal mean values
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            elif op =='lsec':
                # longitudinal section (longitudinal value must be given, [var]@[lon])
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            elif op == 'pinterp':
                # pinterp: pressure interpolation (to the given $plevels)
                vals=plevels + ',1,1'
                ncvar.pinterp(vals,prevfile,CFvarnp)
                sout = sub.call('mv pinterp.nc ' + fileon, shell=True)

                # Keeping the operations
                pyins=pyH + "/nc_var.py -o pinterp -S '" + vals + "' -f " +          \
                  prevfile + "-v " + CFvarnp
                otrackf.write("\n")
                otrackf.write("# " + CFvarn + " " + Fopers + "\n")
                otrackf.write(pyins + "\n")

                # adding CF lon,lat,time in WRF files
                if minf.name == 'WRF':
                    values = vdnx + ':' + vdny + ':'+ Tref + ':' + Tunits
                    ncvar.WRF_toCF(values, fileon)

                # vertical interpolation variables are no more needed
                CFvarnp = CFvarn

            elif op == 'tmean':
                # temporal mean values
                vals='time|-1,time,mean,lon:lat:' + vdnz + ':time:pres'
                dims='time@time,' + dnz + '@' + vdnz + ',lat@lat,lon@lon'
                ncvar.file_oper_alongdims(vals,prevfile,CFvarnp)
                sout = sub.call('mv file_oper_alongdims_mean.nc '+fileon, shell=True)

                # Keeping the operations
                pyins=pyH + "/nc_var.py -o file_oper_alongdims -S '" + vals +        \
                  "' -f " + prevfile + " -v " + CFvarnp
                otrackf.write("\n")
                otrackf.write("# " + CFvarn + " " + Fopers + "\n")
                otrackf.write(pyins + "\n")

                varkeep.append(CFvarn + 'mean')
                varkeep.append('timestats')

#            elif op == 'turb':
#                # turbulence values
#                vals='turbulence|'${CFvarn}
#                dims='time@time,'${dnz}'@'${vdnz}',lat@lat,lon@lon,pres@pres'
#                pyout=`python ${pyHOME}/diagnostics.py -d ${dims} -v ${vals} -f ${cfiles}`

#                # Keeping the operations
#                pyins="python "${pyHOME}"/diagnostics.py -d "${dims}" -v '"${vals}
#                pyins=${pyins}"' -f ${cfiles}"
#                echo " " >> ${odir}/all_statsvars.inf
#                echo "# ${CFvarn}" "${vark}" >> ${odir}/all_statsvars.inf
#                echo ${pyins} >> ${odir}/all_statsvars.inf

#                varkeep=':'${CFvarn}'turb'

            elif op == 'xmean':
                # x-axis mean values
                vals='lon|-1,lon,mean,lon:lat:' + vdnz + ':time:pres'
                dims='time@time,' + dnz + '@' + vdnz + ',lat@lat,lon@lon'
                ncvar.file_oper_alongdims(vals,prevfile,CFvarnp)
                sout = sub.call('mv file_oper_alongdims_mean.nc '+fileon, shell=True)

                # Keeping the operations
                pyins=pyH + "/nc_var.py -o file_oper_alongdims -S '" + vals +        \
                  "' -f " + prevfile + " -v " + CFvarnp
                otrackf.write("\n")
                otrackf.write("# " + CFvarn + " " + Fopers + "\n")
                otrackf.write(pyins + "\n")

                varkeep.append(CFvarn + 'mean')
                varkeep.append('lonstats')

            elif op == 'ymean':
                # y-axis mean values
                vals='lat|-1,lat,mean,lon:lat:' + vdnz + ':time:pres'
                dims='time@time,' + dnz + '@' + vdnz + ',lat@lat,lon@lon'
                ncvar.file_oper_alongdims(vals,prevfile,CFvarnp)
                sout = sub.call('mv file_oper_alongdims_mean.nc '+fileon, shell=True)

                # Keeping the operations
                pyins=pyH + "/nc_var.py -o file_oper_alongdims -S '" + vals +        \
                  "' -f " + prevfile + " -v " + CFvarnp
                otrackf.write("\n")
                otrackf.write("# " + CFvarn + " " + Fopers + "\n")
                otrackf.write(pyins + "\n")

                varkeep.append(CFvarn + 'mean')
                varkeep.append('latstats')
            elif op == 'zsum':
                # vertical aggregated values
                print "  " + fname + ": kind '" + op + "' not ready !!"
                wrongstats.append(CFvarn + '_' + Opers)
                break
            else:
                print errmsg
                print '  ' + fname + ": operation '" + op + "' not ready !!"
                quit(-1)

            # End of kind of operation
            if len(varkeep) > 0:
                varkeepS = ',' + ','.join(varkeep)
            else:
                varkeepS = ''

            totalvarkeeps = CFvarnp + ',' + ','.join(varnCFs) + varkeepS
            oclean = ncvar.cleaning_varsfile(totalvarkeeps,fileon)

    # End of operations
    if len(wrongstats) > 0:
        print warnmsg
        print '  ' + fname + ": statistics not possible to compute:", wrongstats

    return 

def compute_vars(config, modinf, idir, odir, Files, allvarcomp, fscr, debug):
    """ Function to compute the variables
      config= Configuration of the experiment
      modinf= class with information about the model
      idir= input experiment folder
      odir= output experiment folder
      Files= dictionary with the header and the files' correspondence
      allvarcomp= dictionary with all the variables to compute and their information
      fscr= whether files should be computed from scratch or not
    """
    fname = 'compute_vars'

    for vopn in allvarcomp:
        # variable & operation
        vn = vopn.split('_')[0]
        op = vopn.split('_')[1]
        vopnV = allvarcomp[vopn]
        fheader = vopnV[0]
        model = vopnV[1]
        diag = vopnV[2]
        globalP = vopnV[3]
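        # Hypothetical example of an 'allvarcomp' entry: vopn='tas_tmean' would give
        #   vn='tas', op='tmean' and vopnV=['wrfout_d01', 'T2', None, 'none']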

        if debug:
            print '      ' + vn + ': ' + op

        # Computing CF variable
        if model is not None or diag is not None:
            vinf = VariableInf(vn,fheader,model,diag)
            # Compute variable
            compute_variable(modinf, idir, Files, odir, vinf, globalP, fscr,         \
              config['pyHOME'], config['CFreftime'], config['CFunitstime'], debug)

            # Compute variable statistics
            compute_statistics(modinf, idir, Files, odir, vinf, config['plevels'],   \
              globalP, op, fscr, config['pyHOME'], config['CFreftime'],              \
              config['CFunitstime'], debug)

        else:
            print errmsg
            print '  ' + fname + ": neither 'model' nor 'diag' variables for '" + vn \
              + "' !!"
            quit(-1)

    return

# Files with information about the configuration of the script
inffiles = ['varcompute.inf', 'all_computevars.inf', 'all_statsvars.inf']

#######    #######
## MAIN
#######    #######

# Getting configuration from external ASCII file 'model_graphics.dat'
cnf = gen.get_configuration('model_graphics.dat', False)

# scratches
scratch, filescratch, figscratch, addfiles, addfigures, dbg = scratches(cnf)

# Getting models
mods = cnf['models'].split(':')

# Models loop
##
for mod in mods:
    print mod
    # Get experiments and headers of model
    exps, fheaders = exp_headers(mod,cnf)

    # Characteristics of the model
    Modinf = ncvar.model_characteristics(mod,'None','False')
    dnx = Modinf.dimxn
    dny = Modinf.dimyn
    dnz = Modinf.dimzn
    dnt = Modinf.dimtn
    vdnx = Modinf.vardxn
    vdny = Modinf.vardyn
    vdnz = Modinf.vardzn
    vdnt = Modinf.vardtn

    if dbg:
        print '  model characteristics _______'
        print "  dims:", dnx, dny, dnz, dnt
        print "  var dims:", vdnx, vdny, vdnz, vdnt

    moddims = dnx + ',' + dny + ',' + dnz + ',' + dnt
    modvdims = vdnx + ',' + vdny + ',' + vdnz + ',' + vdnt
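    # For WRF, for instance, these would typically look like (assumed; the exact names
    #   come from ncvar.model_characteristics): moddims='west_east,south_north,bottom_top,Time'
    #   and modvdims='XLONG,XLAT,ZNU,Times'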

# Experiments loop
##
    for exp in exps:
        print '  ' + exp + '...'

        # input folder
        iwdir = cnf['ifold'] + '/' + mod + '/' + exp

        # Does input folder exist?
        if not os.path.isdir(iwdir):
            print errmsg
            print "  " + main + ": folder '" + iwdir + "' does not exist !!"
            quit(-1)

        owdir = cnf['ofold'] + '/' + mod + '/' + exp
        sout = sub.call('mkdir -p ' + owdir, shell=True)

        # Do we need to re-analyze all the data?
        if filescratch:
            for inff in inffiles:
                if dbg: print "    removing information file '" + inff + "' ..."
                ins = 'rm ' + owdir + '/' + inff + ' >& /dev/null'
                sout = sub.call(ins, shell=True)

            objf = open(owdir+'/all_computevars.inf','w')
            objf.write("## Computation of variables \n")
            objf.close()
            objf = open(owdir+'/all_statsvars.inf','w')
            objf.write("## Computation of statistics \n")
            objf.close()

        if addfiles:
            sub.call('rm ' + owdir +'/varcompute.inf >& /dev/null', shell=True)

        varcompf = owdir + '/varcompute.inf'
        if not os.path.isfile(varcompf):
            # Does the input folder have header files?
            ih=1
            # Dictionary with the list of files for each header
            files = {}
            # Dictionary with one test file for each header
            testfiles = {}

            for fh in fheaders:
                if filescratch:
                    ins = 'rm '+ owdir+'/*_' + fh + '*.nc >& /dev/null'
                    sout = sub.call(ins, shell=True)
                files1h = gen.files_folder(iwdir,fh)
                if len(files1h) < 1:
                    print errmsg
                    print '  ' + main + ": folder '" + iwdir + "' does not " +       \
                      "contain files '" + fh + "*' !!"
                    quit(-1)
                files[fh] = files1h
                testfiles[fh] = files1h[0]

            if dbg:
                print '  Dictionary of files _______'
                gen.printing_dictionary(files)

                print '  Dictionary of test-files _______'
                gen.printing_dictionary(testfiles)
         
            allcompvar, Nvar = compvars_listconstruct(cnf, Modinf, files, testfiles, \
              iwdir, owdir, dbg)
        else: 
            print warnmsg
            print '  ' + main + ": getting variables to compute from an already existing file !!"
            files, testfiles, allcompvar, Nvar = read_varcomp_file(owdir +           \
              '/varcompute.inf')          
        # End of avoiding to repeat all the experiment search

        print "  For experiment '"+exp+"' is required to compute:", Nvar, "variables"

###
# Computing variables
###
        print "    Computing variables ..."
        compute_vars(cnf, Modinf, iwdir, owdir, files, allcompvar, filescratch, dbg)

        quit()

    # end of experiments loop
# end of mods loop

