Changeset 809 in lmdz_wrf


Timestamp: Jun 7, 2016, 5:17:44 PM (9 years ago)
Author: lfita
Message:

Adding new functions
Adding 'cE' as the character for spaces
Using 'cV' as the character to split multiple values

Location: trunk/tools
Files: 2 edited

  • trunk/tools/generic.py

    (r807 → r809)

@@ -17,13 +17,18 @@
 # Character to split serie of values
 cV = '@'
+# Character for spaces
+cE = '!'
+
 # List of available operations
-operations=['coincident_CFtimes', 'count_cond', 'grid_combinations', 'PolyArea',     \
-  'radial_points',                                                                   \
+operations=['coincident_CFtimes', 'count_cond', 'datetimeStr_conversion',            \
+  'grid_combinations',                                                               \
+  'interpolate_locs', 'PolyArea',                                                    \
+  'radial_points', 'radius_dist',                                                    \
   'rmNOnum', 'running_mean',                                                         \
   'significant_decomposition', 'squared_radial',                                     \
-  'unitsDate', 'wdismean']
+  'table_tex_file', 'unitsDate', 'wdismean']
 
 hundredvals = '0'
-for i in range(1,100): hundredvals = hundredvals + '@' + str(i)
+for i in range(1,100): hundredvals = hundredvals + cV + str(i)
 
 vs100 = '0@1@2@3@4@5@6@7@8@9@10@11@12@13@14@15@16@17@18@19@20@21@22@23@24@25@26@27'
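
A minimal sketch of the two separator conventions this changeset settles on: cV ('@') packs several values into a single command-line token and cE ('!') stands in for a space inside a value. The names below simply mirror the constants added above, and the unpacking calls follow the pattern used throughout generic.py.

    import numpy as np

    cV = '@'   # separator between values inside one argument
    cE = '!'   # placeholder for a space inside one value

    packed = '0@1@2@3@4'
    values = np.array(packed.split(cV), dtype=float)   # array([0., 1., 2., 3., 4.])

    labels = '!@a@b@c'.replace(cE, ' ').split(cV)      # [' ', 'a', 'b', 'c']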
     
@@ -35,9 +40,13 @@
 ## e.g. # generic.py -o 'coincident_CFtimes' -S '0@1@2@3@4@5@6@7@8@9,seconds since 1949-12-01 00:00:00,hours since 1949-12-01 00:00:00'
 ## e.g. # generic.py -o count_cond -S 0@1@2@3@4@5@6@7@8@9,4,le
+## e.g. # generic.py -o datetimeStr_conversion -S '1976-02-17_08:32:05,Y-m-d_H:M:S,matYmdHMS'
 ## e.g. # generic.py -o grid_combinations -S 1,2
+## e.g. # generic.py -o interpolate_locs -S -1.2@2.4@5.6@7.8@12.0,0.5@2.5,lin
 ## e.g. # generic.py -o PolyArea -S -0.5@0.5@0.5@-0.5,0.5@0.5@-0.5@-0.5
 ## e.g. # generic.py -o radial_points -S 0.785398163397,5
+## e.g. # generic.py -o radius_dist -S 3,5,2,2
 ## e.g. # generic.py -o rmNOnum -S LMD123IPSL
-## e.g. # generic.py -o significant_decomposition -S 3.576,-2
+## e.g. # generic.py -o significant_decomposition -S 3.576,-2,0@1@2@3@4@5@6@7@8@9@10@11@12@13@14,a@b@c@d@e,i@ii@iii,table.tex
+## e.g. # generic.py -o table_tex_file -S '5,3,0@5@10@1@6@11@2@7@12@3@8@13@4@9@14,!@a@b@c@d@e,i@ii@iii,table.tex'
 ## e.g. # generic.py -o unitsDate -S '19490101000000,19760217082932,second'
 ## e.g. # generic.py -o running_mean -S 0@1@2@3@4@5@6@7@8@9,10
     
@@ -82,5 +91,5 @@
             print gen.coincident_CFtimes.__doc__
             quit(-1)
-        vals0 = np.array(vals[0].split('@'), dtype=np.float)
+        vals0 = np.array(vals[0].split(cV), dtype=np.float)
         print gen.coincident_CFtimes(vals0, vals[1], vals[2])
 
     
@@ -98,8 +107,23 @@
             print gen.count_cond.__doc__
             quit(-1)
-        vals0 = np.array(vals[0].split('@'), dtype=np.float)
-        print gen.count_cond(np.array(vals[0].split('@'), dtype=np.float),           \
+        vals0 = np.array(vals[0].split(cV), dtype=np.float)
+        print gen.count_cond(np.array(vals[0].split(cV), dtype=np.float),           \
           np.float(vals[1]), vals[2])
 
+elif oper == 'datetimeStr_conversion':
+    Nvals = 3
+    vals = opts.values.split(cS)
+    if vals[0] == 'h':
+        print gen.datetimeStr_conversion.__doc__
+        quit(-1)
+    else:
+        if len(vals) != Nvals:
+            print errormsg
+            print '  ' + main + ": operation '" + oper + "' requires", Nvals, 'and', \
+              len(vals), ' has passed!!'
+            print gen.datetimeStr_conversion.__doc__
+            quit(-1)
+        print gen.datetimeStr_conversion(vals[0], vals[1], vals[2])
+
 elif oper == 'grid_combinations':
     Nvals = 2
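
The datetimeStr_conversion operation takes a date string, its format, and a target representation (the example above asks for 'matYmdHMS'). The actual conversion lives in generic_tools.py and is not shown in this hunk; the sketch below only illustrates the idea with the standard library, under the assumption that 'matYmdHMS' means the list of date components.

    from datetime import datetime

    def datetime_components(datestr, fmt='%Y-%m-%d_%H:%M:%S'):
        """Illustrative only: split a date string into [Y, m, d, H, M, S]."""
        d = datetime.strptime(datestr, fmt)
        return [d.year, d.month, d.day, d.hour, d.minute, d.second]

    print(datetime_components('1976-02-17_08:32:05'))   # [1976, 2, 17, 8, 32, 5]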
     
@@ -117,4 +141,22 @@
 
         print gen.grid_combinations(np.int(vals[0]), np.int(vals[1]))
+
+elif oper == 'interpolate_locs':
+    Nvals = 3
+    vals = opts.values.split(cS)
+    if vals[0] == 'h':
+        print gen.interpolate_locs.__doc__
+        quit(-1)
+    else:
+        if len(vals) != Nvals:
+            print errormsg
+            print '  ' + main + ": operation '" + oper + "' requires", Nvals, 'and', \
+              len(vals), ' has passed!!'
+            print gen.interpolate_locs.__doc__
+            quit(-1)
+        vals0 = np.array(vals[0].split(cV), dtype=np.float)
+        vals1 = np.array(vals[1].split(cV), dtype=np.float)
+
+        print gen.interpolate_locs(vals0, vals1, vals[2])
 
 elif oper == 'PolyArea':
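
interpolate_locs receives two '@'-packed float lists and an interpolation kind ('lin' in the example above); its exact semantics are defined by generic_tools.interpolate_locs(locs, coords, kinterp), outside this hunk. As a rough illustration of the 1-D linear case only, with made-up sample values:

    import numpy as np

    coords = np.array([0.5, 2.5])          # known coordinate positions
    values = np.array([10.0, 30.0])        # values at those positions (made up)
    locs   = np.array([-1.2, 2.4, 5.6])    # locations to evaluate

    print(np.interp(locs, coords, values)) # linear interpolation, end values held outside the range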
     
@@ -151,4 +193,19 @@
         print gen.radial_points(np.float(vals[0]), int(vals[1]))
 
+elif oper == 'radius_dist':
+    Nvals = 4
+    vals = opts.values.split(cS)
+    if vals[0] == 'h':
+        print gen.radius_dist.__doc__
+        quit(-1)
+    else:
+        if len(vals) != Nvals:
+            print errormsg
+            print '  ' + main + ": operation '" + oper + "' requires", Nvals, 'and', \
+              len(vals), ' has passed!!'
+            print gen.radius_dist.__doc__
+            quit(-1)
+        print gen.radius_dist(int(vals[0]), int(vals[1]), int(vals[2]), int(vals[3]))
+
 elif oper == 'rmNOnum':
     Nvals = 1
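
The radius_dist example '3,5,2,2' passes four integers; the function itself sits in generic_tools.py, so the following is only a guess at the kind of result meant: a 3x5 grid of distances from the point (2, 2).

    import numpy as np

    nx, ny, xc, yc = 3, 5, 2, 2
    jj, ii = np.indices((nx, ny))                  # row / column index of every grid cell
    print(np.sqrt((jj - xc)**2 + (ii - yc)**2))    # distance of every cell from (xc, yc)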
     
@@ -179,5 +236,5 @@
             print gen.running_mean.__doc__
             quit(-1)
-        print gen.running_mean(np.array(vals[0].split('@'), dtype=np.float), int(vals[1]))
+        print gen.running_mean(np.array(vals[0].split(cV), dtype=np.float), int(vals[1]))
 
 elif oper == 'significant_decomposition':
     
@@ -211,4 +268,25 @@
         print gen.squared_radial(int(vals[0]))
 
+elif oper == 'table_tex_file':
+    Nvals = 6
+    vals = opts.values.split(cS)
+    if vals[0] == 'h':
+        print gen.table_tex_file.__doc__
+        quit(-1)
+    else:
+        if len(vals) != Nvals:
+            print errormsg
+            print '  ' + main + ": operation '" + oper + "' requires", Nvals, 'and', \
+              len(vals), ' has passed!!'
+            print gen.table_tex_file.__doc__
+            quit(-1)
+        vals2 = np.array(vals[2].split(cV),dtype=np.float).reshape(int(vals[0]),     \
+          int(vals[1]))
+        vals3 = vals[3].replace(cE,' ').split(cV)
+        vals4 = vals[4].replace(cE,' ').split(cV)
+
+        print gen.table_tex_file(int(vals[0]), int(vals[1]), vals2, vals3, vals4,    \
+          vals[5])
+
 elif oper == 'unitsDate':
     Nvals = 3
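
The six comma-separated arguments of the table_tex_file example unpack exactly as the wrapper code above does it. A plain-Python sketch, assuming cS, the argument separator used by generic.py, is the comma seen in all the example lines:

    import numpy as np

    cV, cE = '@', '!'
    values = '5,3,0@5@10@1@6@11@2@7@12@3@8@13@4@9@14,!@a@b@c@d@e,i@ii@iii,table.tex'
    vals = values.split(',')

    ncol, nrow = int(vals[0]), int(vals[1])               # 5 columns, 3 rows
    table = np.array(vals[2].split(cV), dtype=float).reshape(ncol, nrow)
    colnames = vals[3].replace(cE, ' ').split(cV)         # [' ', 'a', 'b', 'c', 'd', 'e']
    rownames = vals[4].replace(cE, ' ').split(cV)         # ['i', 'ii', 'iii']
    ofile = vals[5]                                       # 'table.tex'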
     
@@ -239,6 +317,6 @@
             print gen.wdismean.__doc__
             quit(-1)
-        vals0 = np.array(vals[0].split('@'), dtype=np.float)
-        vals1 = np.array(vals[1].split('@'), dtype=np.float).reshape(2,2)
+        vals0 = np.array(vals[0].split(cV), dtype=np.float)
+        vals1 = np.array(vals[1].split(cV), dtype=np.float).reshape(2,2)
 
         print gen.wdismean(vals0, vals1)
  • trunk/tools/generic_tools.py

    (r805 → r809)

@@ -4528,7 +4528,7 @@
 
 def table_tex(tablevals, colnames, rownames, of):
-    """ Function to write into a LaTeX tabukar from a table of values
+    """ Function to write into a LaTeX tabular from a table of values
       tablevals = (ncol nrow) of values
-      colnames = list with ncol labels for the columns
+      colnames = list with ncol labels for the columns (1 more than data for the row names)
       rownames = list with nrow labels for the rows
       of= object ASCII file to write the table
     
@@ -4581,4 +4581,31 @@
 
     of.write('\\end{tabular}\n')
+
+    return
+
+def table_tex_file(Ncol, Nrow, tablevals, colnames, rownames, ofile):
+    """ Function to write into a file a LaTeX tabular from a table of values
+      tablevals = (ncol nrow) of values
+      colnames = list with ncol labels for the columns (1 more than data for the row names)
+      rownames = list with nrow labels for the rows
+      ofile= ASCII file to write the table
+    >>> values = np.arange(15).reshape(5,3)
+    >>> colns = ['a','b','c','d','e']
+    >>> rowns = ['i','ii','iii']
+    >>> table_tex_file(5, 3, values, colns, rowns, 'table.tex')
+    """
+    fname = 'table_tex_file'
+
+    objf = open(ofile, 'w')
+    objf.write('\\documentclass{article}\n')
+    objf.write('\n')
+    objf.write('\\begin{document}\n')
+
+    table_tex(tablevals, colnames, rownames, objf)
+
+    objf.write('\\end{document}\n')
+    objf.close()
+
+    print "'" + fname + "': successful writing of '" + ofile + "' !!"
 
     return
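
The new table_tex_file simply wraps the tabular produced by the existing table_tex inside a standalone LaTeX document. Based on the write calls above, the generated file has roughly this shape (the tabular body comes from table_tex, whose layout is outside this changeset, so the inner lines are only indicative):

    \documentclass{article}

    \begin{document}
    \begin{tabular}{...}   % opened and filled by table_tex; one row per row label
      ...
    \end{tabular}
    \end{document}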
     
@@ -4641,5 +4668,4 @@
 
     return lonvv, latvv
-
 
 def interpolate_locs(locs,coords,kinterp):