[259] | 1 | # -*- coding: iso-8859-15 -*- |
---|
| 2 | # Generic program to transfrom ASCII observational data in columns to a netcdf |
---|
| 3 | # L. Fita, LMD February 2015 |
---|
| 4 | ## e.g. # create_OBSnetcdf.py -c '#' -d ACAR/description.dat -e space -f ACAR/2012/10/ACAR_121018.asc -g true -t 19491201000000,seconds -k trajectory |
---|
| 5 | |
---|
| 6 | import numpy as np |
---|
| 7 | from netCDF4 import Dataset as NetCDFFile |
---|
| 8 | import os |
---|
| 9 | import re |
---|
| 10 | from optparse import OptionParser |
---|
| 11 | |
---|
| 12 | # version |
---|
[1950] | 13 | version=1.2 |
---|
[259] | 14 | |
---|
| 15 | # Filling values for floats, integer and string |
---|
| 16 | fillValueF = 1.e20 |
---|
| 17 | fillValueI = -99999 |
---|
| 18 | fillValueS = '---' |
---|
| 19 | |
---|
| 20 | # Length of the string variables |
---|
| 21 | StringLength = 200 |
---|
| 22 | |
---|
| 23 | # Size of the map for the complementary variables/maps |
---|
| 24 | Ndim2D = 100 |
---|
| 25 | |
---|
| 26 | main = 'create_OBSnetcdf.py' |
---|
| 27 | errormsg = 'ERROR -- error -- ERROR -- error' |
---|
| 28 | warnmsg = 'WARNING -- warning -- WARNING -- warning' |
---|
| 29 | |
---|
| 30 | fillValue = 1.e20 |
---|
| 31 | |
---|
| 32 | def searchInlist(listname, nameFind): |
---|
| 33 | """ Function to search a value within a list |
---|
| 34 | listname = list |
---|
| 35 | nameFind = value to find |
---|
| 36 | >>> searInlist(['1', '2', '3', '5'], '5') |
---|
| 37 | True |
---|
| 38 | """ |
---|
| 39 | for x in listname: |
---|
| 40 | if x == nameFind: |
---|
| 41 | return True |
---|
| 42 | return False |
---|
| 43 | |
---|
[1948] | 44 | |
---|
| 45 | def typemod(value, typeval): |
---|
| 46 | """ Function to give back a value in a given dtype |
---|
| 47 | >>> print(typemod(8.2223, 'np.float64')) |
---|
| 48 | <type 'numpy.float64'> |
---|
| 49 | >>> print(typemod(8.2223, 'tuple')) |
---|
| 50 | <type 'tuple'> |
---|
| 51 | """ |
---|
| 52 | fname='typemod' |
---|
| 53 | |
---|
| 54 | if typeval == 'int' or typeval == 'I': |
---|
| 55 | return int(value) |
---|
| 56 | elif typeval == 'long': |
---|
| 57 | return long(value) |
---|
| 58 | elif typeval == 'float' or typeval == 'F' or typeval == 'R': |
---|
| 59 | return float(value) |
---|
| 60 | elif typeval == 'complex': |
---|
| 61 | return complex(value) |
---|
| 62 | elif typeval == 'str' or typeval == 'S': |
---|
| 63 | return str(value) |
---|
| 64 | elif typeval == 'bool': |
---|
| 65 | return bool(value) |
---|
| 66 | elif typeval == 'B': |
---|
| 67 | return Str_Bool(value) |
---|
| 68 | elif typeval == 'list': |
---|
| 69 | newval = [] |
---|
| 70 | newval.append(value) |
---|
| 71 | return newval |
---|
| 72 | elif typeval == 'dic': |
---|
| 73 | newval = {} |
---|
| 74 | newval[value] = value |
---|
| 75 | return newval |
---|
| 76 | elif typeval == 'tuple': |
---|
| 77 | newv = [] |
---|
| 78 | newv.append(value) |
---|
| 79 | newval = tuple(newv) |
---|
| 80 | return newval |
---|
| 81 | elif typeval == 'np.int8': |
---|
| 82 | return np.int8(value) |
---|
| 83 | elif typeval == 'np.int16': |
---|
| 84 | return np.int16(value) |
---|
| 85 | elif typeval == 'np.int32': |
---|
| 86 | return np.int32(value) |
---|
| 87 | elif typeval == 'np.int64': |
---|
| 88 | return np.int64(value) |
---|
| 89 | elif typeval == 'np.uint8': |
---|
| 90 | return np.uint8(value) |
---|
| 91 | elif typeval == 'np.uint16': |
---|
| 92 | return np.uint16(value) |
---|
| 93 | elif typeval == 'np.np.uint32': |
---|
| 94 | return np.uint32(value) |
---|
| 95 | elif typeval == 'np.uint64': |
---|
| 96 | return np.uint64(value) |
---|
| 97 | elif typeval == 'np.float' or typeval == 'R': |
---|
| 98 | return np.float(value) |
---|
| 99 | elif typeval == 'np.float16': |
---|
| 100 | return np.float16(value) |
---|
| 101 | elif typeval == 'np.float32': |
---|
| 102 | return np.float32(value) |
---|
| 103 | elif typeval == 'float32': |
---|
| 104 | return np.float32(value) |
---|
| 105 | elif typeval == 'np.float64' or typeval == 'D': |
---|
| 106 | return np.float64(value) |
---|
| 107 | elif typeval == 'float64': |
---|
| 108 | return np.float64(value) |
---|
| 109 | elif typeval == 'np.complex': |
---|
| 110 | return np.complex(value) |
---|
| 111 | elif typeval == 'np.complex64': |
---|
| 112 | return np.complex64(value) |
---|
| 113 | elif typeval == 'np.complex128': |
---|
| 114 | return np.complex128(value) |
---|
| 115 | else: |
---|
| 116 | print errormsg |
---|
| 117 | print fname + ': data type "' + typeval + '" is not ready !' |
---|
| 118 | print errormsg |
---|
| 119 | quit(-1) |
---|
| 120 | |
---|
| 121 | return |
---|
| 122 | |
---|
| 123 | def str_list_k(string, cdiv, kind): |
---|
| 124 | """ Function to obtain a list of types of values from a string giving a split character |
---|
| 125 | string= String from which to obtain a list ('None' for None) |
---|
| 126 | cdiv= character to use to split the string |
---|
| 127 | kind= kind of desired value (as string like: 'np.float', 'int', 'np.float64', ....) |
---|
| 128 | >>> str_list_k('1:@#:$:56', ':', 'S') |
---|
| 129 | ['1', '@#', '$', '56'] |
---|
| 130 | >>> str_list_k('1:3.4:12.3', ':', 'np.float64') |
---|
| 131 | [1.0, 3.3999999999999999, 12.300000000000001] |
---|
| 132 | """ |
---|
| 133 | fname = 'str_list' |
---|
| 134 | |
---|
| 135 | if string.find(cdiv) != -1: |
---|
| 136 | listv = string.split(cdiv) |
---|
| 137 | else: |
---|
| 138 | if string == 'None': |
---|
| 139 | listv = None |
---|
| 140 | else: |
---|
| 141 | listv = [string] |
---|
| 142 | |
---|
| 143 | if listv is not None: |
---|
| 144 | finalist = [] |
---|
| 145 | for lv in listv: |
---|
| 146 | finalist.append(typemod(lv, kind)) |
---|
| 147 | else: |
---|
| 148 | finalist = None |
---|
| 149 | |
---|
| 150 | return finalist |
---|
| 151 | |
---|
| 152 | |
---|
[259] | 153 | def set_attribute(ncvar, attrname, attrvalue): |
---|
| 154 | """ Sets a value of an attribute of a netCDF variable. Removes previous attribute value if exists |
---|
| 155 | ncvar = object netcdf variable |
---|
| 156 | attrname = name of the attribute |
---|
| 157 | attrvalue = value of the attribute |
---|
| 158 | """ |
---|
| 159 | import numpy as np |
---|
| 160 | from netCDF4 import Dataset as NetCDFFile |
---|
| 161 | |
---|
| 162 | attvar = ncvar.ncattrs() |
---|
| 163 | if searchInlist(attvar, attrname): |
---|
| 164 | attr = ncvar.delncattr(attrname) |
---|
| 165 | |
---|
| 166 | attr = ncvar.setncattr(attrname, attrvalue) |
---|
| 167 | |
---|
| 168 | return ncvar |
---|
| 169 | |
---|
[1952] | 170 | def set_attributek(ncv, attrname, attrval, attrkind): |
---|
| 171 | """ Sets a value of an attribute of a netCDF variable with a kind. Removes previous attribute value if exists |
---|
| 172 | ncvar = object netcdf variable |
---|
| 173 | attrname = name of the attribute |
---|
| 174 | attrvalue = value of the attribute |
---|
| 175 | atrtrkind = kind of attribute: 'S', string ('!' as spaces); 'U', unicode ('!' as spaces); 'I', integer; |
---|
| 176 | 'Inp32', numpy integer 32; 'R', ot 'F' real; ' npfloat', np.float; 'D', np.float64 |
---|
| 177 | """ |
---|
| 178 | fname = 'set_attributek' |
---|
| 179 | validk = {'S': 'string', 'U': 'unicode', 'I': 'integer', \ |
---|
| 180 | 'Inp32': 'integer long (np.int32)', 'F': 'float', 'R': 'float', \ |
---|
| 181 | 'npfloat': 'np.float', 'D': 'float long (np.float64)'} |
---|
| 182 | |
---|
| 183 | if type(attrkind) == type('s'): |
---|
| 184 | if attrkind == 'S': |
---|
| 185 | attrvalue = str(attrval.replace('!', ' ')) |
---|
| 186 | elif attrkind == 'U': |
---|
| 187 | attrvalue = unicode(attrval.replace('!',' ')) |
---|
| 188 | elif attrkind == 'I': |
---|
| 189 | attrvalue = np.int(attrval) |
---|
| 190 | elif attrkind == 'R' or attrkind == 'F' : |
---|
| 191 | attrvalue = float(attrval) |
---|
| 192 | elif attrkind == 'npfloat': |
---|
| 193 | attrvalue = np.float(attrval) |
---|
| 194 | elif attrkind == 'D': |
---|
| 195 | attrvalue = np.float64(attrval) |
---|
| 196 | else: |
---|
| 197 | print errormsg |
---|
| 198 | print ' ' + fname + ": '" + attrkind + "' kind of attribute is not ready!" |
---|
| 199 | print ' valid values: _______' |
---|
| 200 | for key in validk.keys(): |
---|
| 201 | print ' ', key,':', validk[key] |
---|
| 202 | quit(-1) |
---|
| 203 | else: |
---|
| 204 | if attrkind == type(str('a')): |
---|
| 205 | attrvalue = str(attrval.replace('!', ' ')) |
---|
| 206 | elif attrkind == type(unicode('a')): |
---|
| 207 | attrvalue = unicode(attrval.replace('!',' ')) |
---|
| 208 | elif attrkind == type(np.int(1)): |
---|
| 209 | attrvalue = np.int(attrval) |
---|
| 210 | elif attrkind == np.dtype('i'): |
---|
| 211 | attrvalue = np.int32(attrval) |
---|
| 212 | elif attrkind == type(float(1.)): |
---|
| 213 | attrvalue = float(attrval) |
---|
| 214 | elif attrkind == type(np.float(1.)): |
---|
| 215 | attrvalue = np.float(attrval) |
---|
| 216 | elif attrkind == np.dtype('float32'): |
---|
| 217 | attrvalue = np.array(attrval, dtype='f') |
---|
| 218 | elif attrkind == type(np.float32(1.)): |
---|
| 219 | attrvalue = np.float32(attrval) |
---|
| 220 | elif attrkind == type(np.float64(1.)): |
---|
| 221 | attrvalue = np.float64(attrval) |
---|
| 222 | elif attrkind == type(np.array([1.,2.])): |
---|
| 223 | attrvalue = attrval |
---|
| 224 | else: |
---|
| 225 | print errormsg |
---|
| 226 | print ' ' + fname + ": '" + attrkind + "' kind of attribute is not ready!" |
---|
| 227 | print ' valid values: _______' |
---|
| 228 | for key in validk.keys(): |
---|
| 229 | print ' ', key,':', validk[key] |
---|
| 230 | quit(-1) |
---|
| 231 | |
---|
| 232 | attvar = ncv.ncattrs() |
---|
| 233 | if searchInlist(attvar, attrname): |
---|
| 234 | attr = ncv.delncattr(attrname) |
---|
| 235 | |
---|
| 236 | if attrname == 'original_subroutines_author': |
---|
| 237 | attrvalue = 'Cindy Bruyere' |
---|
| 238 | attr = ncv.setncattr(attrname, attrvalue) |
---|
| 239 | |
---|
| 240 | return attr |
---|
| 241 | |
---|
| 242 | |
---|
[259] | 243 | def basicvardef(varobj, vstname, vlname, vunits): |
---|
| 244 | """ Function to give the basic attributes to a variable |
---|
| 245 | varobj= netCDF variable object |
---|
| 246 | vstname= standard name of the variable |
---|
| 247 | vlname= long name of the variable |
---|
| 248 | vunits= units of the variable |
---|
| 249 | """ |
---|
| 250 | attr = varobj.setncattr('standard_name', vstname) |
---|
| 251 | attr = varobj.setncattr('long_name', vlname) |
---|
| 252 | attr = varobj.setncattr('units', vunits) |
---|
| 253 | |
---|
[332] | 254 | return |
---|
| 255 | |
---|
[259] | 256 | def remove_NONascii(string): |
---|
| 257 | """ Function to remove that characters which are not in the standard 127 ASCII |
---|
| 258 | string= string to transform |
---|
| 259 | >>> remove_NONascii('LluÃs') |
---|
| 260 | Lluis |
---|
| 261 | """ |
---|
| 262 | fname = 'remove_NONascii' |
---|
| 263 | |
---|
| 264 | newstring = string |
---|
| 265 | |
---|
| 266 | RTFchar= ['á', 'é', 'Ã', 'ó', 'ú', 'à ', 'Ú', 'ì', 'ò', 'ù', 'â', 'ê', 'î', 'ÃŽ', \ |
---|
| 267 | 'û', 'À', 'ë', 'ï', 'ö', 'ÃŒ', 'ç', 'ñ','Ê', 'Å', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', \ |
---|
| 268 | 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã', 'Ã',\ |
---|
| 269 | 'Ã', 'Å', '\n', '\t'] |
---|
| 270 | ASCchar= ['a', 'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o', 'u', 'a', 'e', 'i', 'o', \ |
---|
| 271 | 'u', 'a', 'e', 'i', 'o', 'u', 'c', 'n','ae', 'oe', 'A', 'E', 'I', 'O', 'U', 'A', \ |
---|
| 272 | 'E', 'I', 'O', 'U', 'A', 'E', 'I', 'O', 'U', 'A', 'E', 'I', 'O', 'U', 'C', 'N',\ |
---|
| 273 | 'AE', 'OE', '', ' '] |
---|
| 274 | |
---|
| 275 | Nchars = len(RTFchar) |
---|
| 276 | for ichar in range(Nchars): |
---|
| 277 | foundchar = string.find(RTFchar[ichar]) |
---|
[332] | 278 | if foundchar != -1: |
---|
[259] | 279 | newstring = newstring.replace(RTFchar[ichar], ASCchar[ichar]) |
---|
| 280 | |
---|
| 281 | return newstring |
---|
| 282 | |
---|
| 283 | def read_description(fdobs, dbg): |
---|
| 284 | """ reads the description file of the observational data-set |
---|
| 285 | fdobs= descriptive observational data-set |
---|
| 286 | dbg= boolean argument for debugging |
---|
| 287 | * Each station should have a 'description.dat' file with: |
---|
| 288 | institution=Institution who creates the data |
---|
| 289 | department=Department within the institution |
---|
| 290 | scientists=names of the data producers |
---|
| 291 | contact=contact of the data producers |
---|
| 292 | description=description of the observations |
---|
| 293 | acknowledgement=sentence of acknowlegement |
---|
| 294 | comment=comment for the measurements |
---|
| 295 | |
---|
| 296 | MissingValue='|' list of ASCII values for missing values within the data |
---|
[553] | 297 | (as they appear!, 'empty' for no value at all) |
---|
[259] | 298 | comment=comments |
---|
| 299 | |
---|
| 300 | varN='|' list of variable names |
---|
| 301 | varLN='|' list of long variable names |
---|
| 302 | varU='|' list units of the variables |
---|
| 303 | varBUFR='|' list BUFR code of the variables |
---|
| 304 | varTYPE='|' list of variable types ('D', 'F', 'I', 'I64', 'S') |
---|
[553] | 305 | varOPER='|' list of operations to do to the variables to meet their units ([oper],[val]) |
---|
[587] | 306 | [oper]: |
---|
| 307 | -, nothing |
---|
| 308 | sumc, add [val] |
---|
| 309 | subc, rest [val] |
---|
| 310 | mulc, multiply by [val] |
---|
| 311 | divc, divide by [val] |
---|
| 312 | rmchar,[val],[pos], remove [val] characters from [pos]='B', beginning, 'E', end |
---|
[259] | 313 | NAMElon=name of the variable with the longitude (x position) |
---|
| 314 | NAMElat=name of the variable with the latitude (y position) |
---|
| 315 | NAMEheight=ind_alt |
---|
| 316 | NAMEtime=name of the varibale with the time |
---|
| 317 | FMTtime=format of the time (as in 'C', 'CFtime' for already CF-like time) |
---|
| 318 | |
---|
| 319 | """ |
---|
| 320 | fname = 'read_description' |
---|
| 321 | |
---|
| 322 | descobj = open(fdobs, 'r') |
---|
| 323 | desc = {} |
---|
| 324 | |
---|
| 325 | namevalues = [] |
---|
| 326 | |
---|
| 327 | for line in descobj: |
---|
| 328 | if line[0:1] != '#' and len(line) > 1 : |
---|
| 329 | descn = remove_NONascii(line.split('=')[0]) |
---|
| 330 | descv = remove_NONascii(line.split('=')[1]) |
---|
| 331 | namevalues.append(descn) |
---|
| 332 | if descn[0:3] != 'var': |
---|
| 333 | if descn != 'MissingValue': |
---|
| 334 | desc[descn] = descv |
---|
| 335 | else: |
---|
| 336 | desc[descn] = [] |
---|
| 337 | for dn in descv.split('|'): desc[descn].append(dn) |
---|
| 338 | print ' ' + fname + ': missing values found:',desc[descn] |
---|
| 339 | elif descn[0:3] == 'var': |
---|
| 340 | desc[descn] = descv.split('|') |
---|
| 341 | elif descn[0:4] == 'NAME': |
---|
| 342 | desc[descn] = descv |
---|
| 343 | elif descn[0:3] == 'FMT': |
---|
| 344 | desc[descn] = descv |
---|
[553] | 345 | if not desc.has_key('varOPER'): desc['varOPER'] = None |
---|
[259] | 346 | |
---|
| 347 | if dbg: |
---|
| 348 | Nvars = len(desc['varN']) |
---|
| 349 | print ' ' + fname + ": description content of '" + fdobs + "'______________" |
---|
| 350 | for varn in namevalues: |
---|
| 351 | if varn[0:3] != 'var': |
---|
| 352 | print ' ' + varn + ':',desc[varn] |
---|
| 353 | elif varn == 'varN': |
---|
| 354 | print ' * Variables:' |
---|
| 355 | for ivar in range(Nvars): |
---|
| 356 | varname = desc['varN'][ivar] |
---|
| 357 | varLname = desc['varLN'][ivar] |
---|
| 358 | varunits = desc['varU'][ivar] |
---|
| 359 | if desc.has_key('varBUFR'): |
---|
[553] | 360 | varbufr = desc['varBUFR'][ivar] |
---|
[259] | 361 | else: |
---|
[553] | 362 | varbufr = None |
---|
| 363 | if desc['varOPER'] is not None: |
---|
| 364 | opv = desc['varOPER'][ivar] |
---|
| 365 | else: |
---|
| 366 | opv = None |
---|
| 367 | print ' ', ivar, varname+':',varLname,'[',varunits, \ |
---|
| 368 | ']','bufr code:',varbufr,'oper:',opv |
---|
[259] | 369 | |
---|
| 370 | descobj.close() |
---|
| 371 | |
---|
| 372 | return desc |
---|
| 373 | |
---|
[553] | 374 | def value_fmt(val, miss, op, fmt): |
---|
[259] | 375 | """ Function to transform an ASCII value to a given format |
---|
| 376 | val= value to transform |
---|
| 377 | miss= list of possible missing values |
---|
[553] | 378 | op= operation to perform to the value |
---|
[259] | 379 | fmt= format to take: |
---|
| 380 | 'D': float double precission |
---|
| 381 | 'F': float |
---|
| 382 | 'I': integer |
---|
| 383 | 'I64': 64-bits integer |
---|
| 384 | 'S': string |
---|
| 385 | >>> value_fmt('9876.12', '-999', 'F') |
---|
| 386 | 9876.12 |
---|
| 387 | """ |
---|
| 388 | |
---|
| 389 | fname = 'value_fmt' |
---|
| 390 | |
---|
[587] | 391 | aopers = ['sumc','subc','mulc','divc', 'rmchar'] |
---|
[553] | 392 | |
---|
[259] | 393 | fmts = ['D', 'F', 'I', 'I64', 'S'] |
---|
| 394 | Nfmts = len(fmts) |
---|
| 395 | |
---|
| 396 | if not searchInlist(miss,val): |
---|
[553] | 397 | if searchInlist(miss,'empty') and len(val) == 0: |
---|
| 398 | newval = None |
---|
[259] | 399 | else: |
---|
[553] | 400 | if op != '-': |
---|
| 401 | opern = op.split(',')[0] |
---|
[587] | 402 | operv = op.split(',')[1] |
---|
[553] | 403 | |
---|
| 404 | if not searchInlist(aopers,opern): |
---|
| 405 | print errormsg |
---|
| 406 | print ' ' + fname + ": operation '" + opern + "' not ready!!" |
---|
| 407 | print ' availables:',aopers |
---|
| 408 | quit(-1) |
---|
| 409 | else: |
---|
| 410 | opern = 'sumc' |
---|
[1948] | 411 | operv = '0' |
---|
[553] | 412 | |
---|
| 413 | if not searchInlist(fmts, fmt): |
---|
| 414 | print errormsg |
---|
| 415 | print ' ' + fname + ": format '" + fmt + "' not ready !!" |
---|
| 416 | quit(-1) |
---|
| 417 | else: |
---|
| 418 | if fmt == 'D': |
---|
| 419 | opv = np.float32(operv) |
---|
| 420 | if opern == 'sumc': newval = np.float32(val) + opv |
---|
| 421 | elif opern == 'subc': newval = np.float32(val) - opv |
---|
| 422 | elif opern == 'mulc': newval = np.float32(val) * opv |
---|
| 423 | elif opern == 'divc': newval = np.float32(val) / opv |
---|
[587] | 424 | elif opern == 'rmchar': |
---|
| 425 | opv = int(operv) |
---|
| 426 | Lval = len(val) |
---|
| 427 | if op.split(',')[2] == 'B': |
---|
| 428 | newval = np.float32(val[opv:Lval+1]) |
---|
| 429 | elif op.split(',')[2] == 'E': |
---|
| 430 | newval = np.float32(val[Lval-opv:Lval]) |
---|
| 431 | else: |
---|
| 432 | print errormsg |
---|
| 433 | print ' ' + fname + ": operation '" + opern + "' not " +\ |
---|
| 434 | " work with '" + op.split(',')[2] + "' !!" |
---|
| 435 | quit(-1) |
---|
[553] | 436 | elif fmt == 'F': |
---|
| 437 | opv = np.float(operv) |
---|
| 438 | if opern == 'sumc': newval = np.float(val) + opv |
---|
| 439 | elif opern == 'subc': newval = np.float(val) - opv |
---|
| 440 | elif opern == 'mulc': newval = np.float(val) * opv |
---|
| 441 | elif opern == 'divc': newval = np.float(val) / opv |
---|
[587] | 442 | elif opern == 'rmchar': |
---|
| 443 | opv = int(operv) |
---|
| 444 | Lval = len(val) |
---|
| 445 | if op.split(',')[2] == 'B': |
---|
| 446 | newval = np.float(val[opv:Lval+1]) |
---|
| 447 | elif op.split(',')[2] == 'E': |
---|
| 448 | newval = np.float(val[0:Lval-opv]) |
---|
| 449 | else: |
---|
| 450 | print errormsg |
---|
| 451 | print ' ' + fname + ": operation '" + opern + "' not " +\ |
---|
| 452 | " work with '" + op.split(',')[2] + "' !!" |
---|
| 453 | quit(-1) |
---|
| 454 | |
---|
[553] | 455 | elif fmt == 'I': |
---|
| 456 | opv = int(operv) |
---|
| 457 | if opern == 'sumc': newval = int(val) + opv |
---|
| 458 | elif opern == 'subc': newval = int(val) - opv |
---|
| 459 | elif opern == 'mulc': newval = int(val) * opv |
---|
| 460 | elif opern == 'divc': newval = int(val) / opv |
---|
[587] | 461 | elif opern == 'rmchar': |
---|
| 462 | opv = int(operv) |
---|
| 463 | Lval = len(val) |
---|
| 464 | if op.split(',')[2] == 'B': |
---|
| 465 | newval = int(val[opv:Lval+1]) |
---|
| 466 | elif op.split(',')[2] == 'E': |
---|
| 467 | newval = int(val[0:Lval-opv]) |
---|
| 468 | else: |
---|
| 469 | print errormsg |
---|
| 470 | print ' ' + fname + ": operation '" + opern + "' not " +\ |
---|
| 471 | " work with '" + op.split(',')[2] + "' !!" |
---|
| 472 | quit(-1) |
---|
[553] | 473 | elif fmt == 'I64': |
---|
| 474 | opv = np.int64(operv) |
---|
| 475 | if opern == 'sumc': newval = np.int64(val) + opv |
---|
| 476 | elif opern == 'subc': newval = np.int64(val) - opv |
---|
| 477 | elif opern == 'mulc': newval = np.int64(val) * opv |
---|
| 478 | elif opern == 'divc': newval = np.int64(val) / opv |
---|
[587] | 479 | elif opern == 'rmchar': |
---|
| 480 | opv = int(operv) |
---|
| 481 | Lval = len(val) |
---|
| 482 | if op.split(',')[2] == 'B': |
---|
| 483 | newval = np.int64(val[opv:Lval+1]) |
---|
| 484 | elif op.split(',')[2] == 'E': |
---|
| 485 | newval = np.int64(val[0:Lval-opv]) |
---|
| 486 | else: |
---|
| 487 | print errormsg |
---|
| 488 | print ' ' + fname + ": operation '" + opern + "' not " +\ |
---|
| 489 | " work with '" + op.split(',')[2] + "' !!" |
---|
| 490 | quit(-1) |
---|
[553] | 491 | elif fmt == 'S': |
---|
[587] | 492 | if opern == 'rmchar': |
---|
| 493 | opv = int(operv) |
---|
| 494 | Lval = len(val) |
---|
| 495 | if op.split(',')[2] == 'B': |
---|
| 496 | newval = val[opv:Lval+1] |
---|
| 497 | elif op.split(',')[2] == 'E': |
---|
| 498 | newval = val[0:Lval-opv] |
---|
| 499 | else: |
---|
| 500 | print errormsg |
---|
| 501 | print ' ' + fname + ": operation '" + opern + "' not " +\ |
---|
| 502 | " work with '" + op.split(',')[2] + "' !!" |
---|
| 503 | quit(-1) |
---|
| 504 | else: |
---|
| 505 | newval = val |
---|
| 506 | |
---|
[259] | 507 | else: |
---|
| 508 | newval = None |
---|
| 509 | |
---|
| 510 | return newval |
---|
| 511 | |
---|
[1948] | 512 | def getting_fixedline(line, cuts, types, dbg=False): |
---|
| 513 | """ Function to get the values from a line of text with fixed lenght of different values |
---|
| 514 | line: line with values |
---|
| 515 | cuts: character number where a value ends |
---|
| 516 | types: consecutive type of values |
---|
| 517 | 'I': integer |
---|
| 518 | 'R': real |
---|
| 519 | 'D': float64 |
---|
| 520 | 'S': string |
---|
| 521 | 'B': boolean |
---|
| 522 | dbg: debug mode (default False) |
---|
| 523 | >>> Sline=' 87007 03012015 25.6 6.4 9.4 5 15' |
---|
| 524 | >>> getting_fixedline(Sline, [8, 17, 23, 29, 36, 40, 45, 50], ['I', 'R', 'R', 'R', 'I', 'R', 'R', 'R']) |
---|
| 525 | [87007, 3012015.0, 25.6, 6.4, -99999, -99999, 9.4, 5.0, 15.0] |
---|
| 526 | """ |
---|
| 527 | fname = 'getting_fixedline' |
---|
| 528 | |
---|
| 529 | if len(cuts) + 1 != len(types): |
---|
| 530 | print errormsg |
---|
| 531 | print ' ' + fname + ': The number of types :', len(types), 'must be +1', \ |
---|
| 532 | 'number of cuts:', len(cuts) |
---|
| 533 | quit(-1) |
---|
| 534 | |
---|
| 535 | values = [] |
---|
| 536 | val = line[0:cuts[0]] |
---|
| 537 | if len(val.replace(' ','')) >= 1: |
---|
| 538 | values.append(typemod(val, types[0])) |
---|
| 539 | else: |
---|
| 540 | if types[0] == 'I': values.append(fillValueI) |
---|
| 541 | elif types[0] == 'R': values.append(fillValueF) |
---|
| 542 | elif types[0] == 'D': values.append(fillValueF) |
---|
| 543 | elif types[0] == 'S': values.append(fillValueS) |
---|
| 544 | elif types[0] == 'B': values.append(fillValueB) |
---|
| 545 | |
---|
| 546 | Ncuts = len(cuts) |
---|
| 547 | for ic in range(1,Ncuts): |
---|
| 548 | val = line[cuts[ic-1]:cuts[ic]] |
---|
| 549 | if dbg: print ic, ':', val, '-->', types[ic] |
---|
| 550 | if len(val.replace(' ','')) >= 1: |
---|
| 551 | values.append(typemod(val, types[ic])) |
---|
| 552 | else: |
---|
| 553 | if types[ic] == 'I': values.append(fillValueI) |
---|
| 554 | elif types[ic] == 'R': values.append(fillValueF) |
---|
| 555 | elif types[ic] == 'D': values.append(fillValueF) |
---|
| 556 | elif types[ic] == 'S': values.append(fillValueS) |
---|
| 557 | elif types[ic] == 'B': values.append(fillValueB) |
---|
| 558 | |
---|
| 559 | # Last value |
---|
| 560 | Lline = len(line) |
---|
| 561 | val = line[cuts[Ncuts-1]:Lline] |
---|
| 562 | if len(val.replace(' ','')) >= 1: |
---|
| 563 | values.append(typemod(val, types[Ncuts])) |
---|
| 564 | else: |
---|
| 565 | if types[Ncuts] == 'I': values.append(fillValueI) |
---|
| 566 | elif types[Ncuts] == 'R': values.append(fillValueF) |
---|
| 567 | elif types[Ncuts] == 'D': values.append(fillValueF) |
---|
| 568 | elif types[Ncuts] == 'S': values.append(fillValueS) |
---|
| 569 | elif types[Ncuts] == 'B': values.append(fillValueB) |
---|
| 570 | |
---|
| 571 | return values |
---|
| 572 | |
---|
| 573 | |
---|
| 574 | def getting_fixedline_NOk(line, cuts, miss, dbg=False): |
---|
| 575 | """ Function to get the values from a line of text with fixed lenght of |
---|
| 576 | different values without types |
---|
| 577 | line: line with values |
---|
| 578 | cuts: character number where a value ends |
---|
| 579 | miss: character form issed values |
---|
| 580 | dbg: debug mode (default False) |
---|
| 581 | >>> Sline=' 87007 03012015 25.6 6.4 9.4 5 15' |
---|
| 582 | >>> getting_fixedline_NOk(Sline, [8, 17, 23, 29, 36, 40, 45, 50], '#') |
---|
| 583 | ['87007', '03012015', '25.6', '6.4', '#', '#', '9.4', '5', '15'] |
---|
| 584 | """ |
---|
| 585 | fname = 'getting_fixedline_NOk' |
---|
| 586 | |
---|
| 587 | values = [] |
---|
| 588 | val = line[0:cuts[0]] |
---|
| 589 | if len(val.replace(' ','')) >= 1: |
---|
| 590 | values.append(val.replace(' ','')) |
---|
| 591 | else: |
---|
| 592 | values.append(miss) |
---|
| 593 | |
---|
| 594 | Ncuts = len(cuts) |
---|
| 595 | for ic in range(1,Ncuts): |
---|
| 596 | val = line[cuts[ic-1]:cuts[ic]] |
---|
| 597 | if dbg: print ic, ':', val |
---|
| 598 | if len(val.replace(' ','')) >= 1: |
---|
| 599 | values.append(val.replace(' ','')) |
---|
| 600 | else: |
---|
| 601 | values.append(miss) |
---|
| 602 | |
---|
| 603 | # Last value |
---|
| 604 | Lline = len(line) |
---|
| 605 | val = line[cuts[Ncuts-1]:Lline] |
---|
| 606 | if len(val.replace(' ','')) >= 1: |
---|
| 607 | values.append(val.replace(' ','')) |
---|
| 608 | else: |
---|
| 609 | values.append(miss) |
---|
| 610 | |
---|
| 611 | return values |
---|
| 612 | |
---|
[553] | 613 | def read_datavalues(dataf, comchar, colchar, fmt, oper, miss, varns, dbg): |
---|
[259] | 614 | """ Function to read from an ASCII file values in column |
---|
| 615 | dataf= data file |
---|
| 616 | comchar= list of the characters indicating comment in the file |
---|
| 617 | colchar= character which indicate end of value in the column |
---|
| 618 | dbg= debug mode or not |
---|
| 619 | fmt= list of kind of values to be found |
---|
[553] | 620 | oper= list of operations to perform |
---|
[259] | 621 | miss= missing value |
---|
| 622 | varns= list of name of the variables to find |
---|
| 623 | """ |
---|
| 624 | |
---|
| 625 | fname = 'read_datavalues' |
---|
| 626 | |
---|
| 627 | ofile = open(dataf, 'r') |
---|
| 628 | Nvals = len(fmt) |
---|
| 629 | |
---|
[553] | 630 | if oper is None: |
---|
| 631 | opers = [] |
---|
| 632 | for ioper in range(Nvals): |
---|
| 633 | opers.append('-') |
---|
| 634 | else: |
---|
| 635 | opers = oper |
---|
| 636 | |
---|
[259] | 637 | finalvalues = {} |
---|
| 638 | |
---|
| 639 | iline = 0 |
---|
| 640 | for line in ofile: |
---|
| 641 | line = line.replace('\n','').replace(chr(13),'') |
---|
| 642 | if not searchInlist(comchar,line[0:1]) and len(line) > 1: |
---|
[1948] | 643 | # Getting values |
---|
| 644 | if colchar[0:4] != 'None': |
---|
| 645 | values0 = line.split(colchar) |
---|
| 646 | else: |
---|
| 647 | ltypes=str_list_k(colchar[5:len(colchar)+1],',','I') |
---|
| 648 | values0 = getting_fixedline_NOk(line, ltypes, miss[0], dbg=dbg) |
---|
[259] | 649 | # Removing no-value columns |
---|
| 650 | values = [] |
---|
| 651 | for iv in values0: |
---|
| 652 | if len(iv) > 0: values.append(iv) |
---|
| 653 | |
---|
| 654 | Nvalues = len(values) |
---|
| 655 | # Checkings for wierd characters at the end of lines (use it to check) |
---|
| 656 | # if values[Nvalues-1][0:4] == '-999': |
---|
| 657 | # print line,'last v:',values[Nvalues-1],'len:',len(values[Nvalues-1]) |
---|
| 658 | # for ic in range(len(values[Nvalues-1])): |
---|
| 659 | # print ic,ord(values[Nvalues-1][ic:ic+1]) |
---|
| 660 | # quit() |
---|
| 661 | |
---|
| 662 | if len(values[Nvalues-1]) == 0: |
---|
| 663 | Nvalues = Nvalues - 1 |
---|
| 664 | |
---|
| 665 | if Nvalues != Nvals: |
---|
| 666 | print warnmsg |
---|
| 667 | print ' ' + fname + ': number of formats:',Nvals,' and number of ', \ |
---|
| 668 | 'values:',Nvalues,' with split character *' + colchar + \ |
---|
| 669 | '* does not coincide!!' |
---|
| 670 | print ' * what is found is ________' |
---|
| 671 | if Nvalues > Nvals: |
---|
| 672 | Nshow = Nvals |
---|
| 673 | for ivar in range(Nshow): |
---|
| 674 | print ' ',varns[ivar],'fmt:',fmt[ivar],'value:',values[ivar] |
---|
| 675 | print ' missing formats for:',values[Nshow:Nvalues+1] |
---|
| 676 | print ' values not considered, continue' |
---|
| 677 | else: |
---|
| 678 | Nshow = Nvalues |
---|
| 679 | for ivar in range(Nshow): |
---|
| 680 | print ' ',varns[ivar],'fmt:',fmt[ivar],'value:',values[ivar] |
---|
| 681 | print ' excess of formats:',fmt[Nshow:Nvals+1] |
---|
| 682 | quit(-1) |
---|
| 683 | |
---|
| 684 | # Reading and transforming values |
---|
| 685 | if dbg: print ' ' + fname + ': values found _________' |
---|
| 686 | |
---|
| 687 | for ivar in range(Nvals): |
---|
| 688 | if dbg: |
---|
[553] | 689 | print iline, varns[ivar],'value:',values[ivar],miss,opers[ivar], \ |
---|
| 690 | fmt[ivar] |
---|
[259] | 691 | |
---|
| 692 | if iline == 0: |
---|
| 693 | listvals = [] |
---|
[553] | 694 | listvals.append(value_fmt(values[ivar], miss, opers[ivar], \ |
---|
| 695 | fmt[ivar])) |
---|
[259] | 696 | finalvalues[varns[ivar]] = listvals |
---|
| 697 | else: |
---|
| 698 | listvals = finalvalues[varns[ivar]] |
---|
[553] | 699 | listvals.append(value_fmt(values[ivar], miss, opers[ivar], \ |
---|
| 700 | fmt[ivar])) |
---|
[259] | 701 | finalvalues[varns[ivar]] = listvals |
---|
| 702 | else: |
---|
| 703 | # First line without values |
---|
| 704 | if iline == 0: iline = -1 |
---|
| 705 | |
---|
| 706 | iline = iline + 1 |
---|
| 707 | |
---|
| 708 | ofile.close() |
---|
| 709 | |
---|
| 710 | return finalvalues |
---|
| 711 | |
---|
| 712 | def writing_str_nc(varo, values, Lchar): |
---|
| 713 | """ Function to write string values in a netCDF variable as a chain of 1char values |
---|
| 714 | varo= netCDF variable object |
---|
| 715 | values = list of values to introduce |
---|
| 716 | Lchar = length of the string in the netCDF file |
---|
| 717 | """ |
---|
| 718 | |
---|
| 719 | Nvals = len(values) |
---|
[553] | 720 | |
---|
[259] | 721 | for iv in range(Nvals): |
---|
| 722 | stringv=values[iv] |
---|
| 723 | charvals = np.chararray(Lchar) |
---|
| 724 | Lstr = len(stringv) |
---|
| 725 | charvals[Lstr:Lchar] = '' |
---|
| 726 | |
---|
| 727 | for ich in range(Lstr): |
---|
| 728 | charvals[ich] = stringv[ich:ich+1] |
---|
| 729 | |
---|
| 730 | varo[iv,:] = charvals |
---|
| 731 | |
---|
| 732 | return |
---|
| 733 | |
---|
| 734 | def Stringtimes_CF(tvals, fmt, Srefdate, tunits, dbg): |
---|
| 735 | """ Function to transform a given data in String formats to a CF date |
---|
| 736 | tvals= string temporal values |
---|
| 737 | fmt= format of the the time values |
---|
| 738 | Srefdate= reference date in [YYYY][MM][DD][HH][MI][SS] format |
---|
| 739 | tunits= units to use ('weeks', 'days', 'hours', 'minutes', 'seconds') |
---|
| 740 | dbg= debug |
---|
| 741 | >>> Stringtimes_CF(['19760217082712','20150213101837'], '%Y%m%d%H%M%S', |
---|
| 742 | '19491201000000', 'hours', False) |
---|
| 743 | [229784.45333333 571570.31027778] |
---|
| 744 | """ |
---|
| 745 | import datetime as dt |
---|
| 746 | |
---|
| 747 | fname = 'Stringtimes' |
---|
| 748 | |
---|
| 749 | dimt = len(tvals) |
---|
| 750 | |
---|
| 751 | yrref = int(Srefdate[0:4]) |
---|
| 752 | monref = int(Srefdate[4:6]) |
---|
| 753 | dayref = int(Srefdate[6:8]) |
---|
| 754 | horref = int(Srefdate[8:10]) |
---|
| 755 | minref = int(Srefdate[10:12]) |
---|
| 756 | secref = int(Srefdate[12:14]) |
---|
| 757 | refdate = dt.datetime( yrref, monref, dayref, horref, minref, secref) |
---|
| 758 | |
---|
| 759 | cftimes = np.zeros((dimt), dtype=np.float) |
---|
| 760 | |
---|
| 761 | Nfmt=len(fmt.split('%')) |
---|
| 762 | |
---|
| 763 | if dbg: print ' ' + fname + ': fmt=',fmt,'refdate:',Srefdate,'uits:',tunits, \ |
---|
| 764 | 'date dt_days dt_time deltaseconds CFtime _______' |
---|
| 765 | for it in range(dimt): |
---|
| 766 | |
---|
| 767 | # Removing excess of mili-seconds (up to 6 decimals) |
---|
| 768 | if fmt.split('%')[Nfmt-1] == 'f': |
---|
| 769 | tpoints = tvals[it].split('.') |
---|
| 770 | if len(tpoints[len(tpoints)-1]) > 6: |
---|
| 771 | milisec = '{0:.6f}'.format(np.float('0.'+tpoints[len(tpoints)-1]))[0:7] |
---|
| 772 | newtval = '' |
---|
| 773 | for ipt in range(len(tpoints)-1): |
---|
| 774 | newtval = newtval + tpoints[ipt] + '.' |
---|
| 775 | newtval = newtval + str(milisec)[2:len(milisec)+1] |
---|
| 776 | else: |
---|
| 777 | newtval = tvals[it] |
---|
| 778 | tval = dt.datetime.strptime(newtval, fmt) |
---|
| 779 | else: |
---|
| 780 | tval = dt.datetime.strptime(tvals[it], fmt) |
---|
| 781 | |
---|
| 782 | deltatime = tval - refdate |
---|
| 783 | deltaseconds = deltatime.days*24.*3600. + deltatime.seconds + \ |
---|
| 784 | deltatime.microseconds/100000. |
---|
| 785 | if tunits == 'weeks': |
---|
| 786 | deltat = 7.*24.*3600. |
---|
| 787 | elif tunits == 'days': |
---|
| 788 | deltat = 24.*3600. |
---|
| 789 | elif tunits == 'hours': |
---|
| 790 | deltat = 3600. |
---|
| 791 | elif tunits == 'minutes': |
---|
| 792 | deltat = 60. |
---|
| 793 | elif tunits == 'seconds': |
---|
| 794 | deltat = 1. |
---|
| 795 | else: |
---|
| 796 | print errormsg |
---|
| 797 | print ' ' + fname + ": time units '" + tunits + "' not ready !!" |
---|
| 798 | quit(-1) |
---|
| 799 | |
---|
| 800 | cftimes[it] = deltaseconds / deltat |
---|
| 801 | if dbg: |
---|
| 802 | print ' ' + tvals[it], deltatime, deltaseconds, cftimes[it] |
---|
| 803 | |
---|
| 804 | return cftimes |
---|
| 805 | |
---|
| 806 | def adding_complementary(onc, dscn, okind, dvalues, tvals, refCFt, CFtu, Nd, dbg): |
---|
| 807 | """ Function to add complementary variables as function of the observational type |
---|
| 808 | onc= netcdf objecto file to add the variables |
---|
| 809 | dscn= description dictionary |
---|
| 810 | okind= observational kind |
---|
| 811 | dvalues= values |
---|
| 812 | tvals= CF time values |
---|
| 813 | refCFt= reference time of CF time (in [YYYY][MM][DD][HH][MI][SS]) |
---|
| 814 | CFtu= CF time units |
---|
| 815 | Nd= size of the domain |
---|
| 816 | dbg= debugging flag |
---|
| 817 | """ |
---|
| 818 | import numpy.ma as ma |
---|
| 819 | |
---|
| 820 | fname = 'adding_complementary' |
---|
| 821 | |
---|
| 822 | # Kind of observations which require de integer lon/lat (for the 2D map) |
---|
| 823 | map2D=['multi-points', 'trajectory'] |
---|
| 824 | |
---|
| 825 | SrefCFt = refCFt[0:4] +'-'+ refCFt[4:6] +'-'+ refCFt[6:8] + ' ' + refCFt[8:10] + \ |
---|
| 826 | ':'+ refCFt[10:12] +':'+ refCFt[12:14] |
---|
| 827 | |
---|
| 828 | if dscn['NAMElon'] == '-' or dscn['NAMElat'] == '-': |
---|
| 829 | print errormsg |
---|
| 830 | print ' ' + fname + ": to complement a '" + okind + "' observation kind " + \ |
---|
| 831 | " a given longitude ('NAMElon':",dscn['NAMElon'],") and latitude ('" + \ |
---|
| 832 | "'NAMElat:'", dscn['NAMElat'],') from the data has to be provided and ' + \ |
---|
| 833 | 'any are given !!' |
---|
| 834 | quit(-1) |
---|
| 835 | |
---|
| 836 | if not dvalues.has_key(dscn['NAMElon']) or not dvalues.has_key(dscn['NAMElat']): |
---|
| 837 | print errormsg |
---|
| 838 | print ' ' + fname + ": observations do not have 'NAMElon':", \ |
---|
| 839 | dscn['NAMElon'],"and/or 'NAMElat:'", dscn['NAMElat'],' !!' |
---|
| 840 | print ' available data:',dvalues.keys() |
---|
| 841 | quit(-1) |
---|
| 842 | |
---|
| 843 | if okind == 'trajectory': |
---|
| 844 | if dscn['NAMEheight'] == '-': |
---|
| 845 | print warnmsg |
---|
| 846 | print ' ' + fname + ": to complement a '" + okind + "' observation " + \ |
---|
| 847 | "kind a given height ('NAMEheight':",dscn['NAMEheight'],"') might " + \ |
---|
| 848 | 'be provided and any is given !!' |
---|
| 849 | quit(-1) |
---|
| 850 | |
---|
| 851 | if not dvalues.has_key(dscn['NAMEheight']): |
---|
| 852 | print errormsg |
---|
| 853 | print ' ' + fname + ": observations do not have 'NAMEtime':", \ |
---|
| 854 | dscn['NAMEtime'],' !!' |
---|
| 855 | print ' available data:',dvalues.keys() |
---|
| 856 | quit(-1) |
---|
| 857 | |
---|
| 858 | if searchInlist(map2D, okind): |
---|
| 859 | # A new 2D map with the number of observation will be added for that 'NAMElon' |
---|
| 860 | # and 'NAMElat' are necessary. A NdxNd domain space size will be used. |
---|
| 861 | objfile.createDimension('lon2D',Nd) |
---|
| 862 | objfile.createDimension('lat2D',Nd) |
---|
| 863 | lonvals = ma.masked_equal(dvalues[dscn['NAMElon']], None) |
---|
| 864 | latvals = ma.masked_equal(dvalues[dscn['NAMElat']], None) |
---|
| 865 | |
---|
| 866 | minlon = min(lonvals) |
---|
| 867 | maxlon = max(lonvals) |
---|
| 868 | minlat = min(latvals) |
---|
| 869 | maxlat = max(latvals) |
---|
| 870 | |
---|
| 871 | blon = (maxlon - minlon)/(Nd-1) |
---|
| 872 | blat = (maxlat - minlat)/(Nd-1) |
---|
| 873 | |
---|
| 874 | newvar = onc.createVariable( 'lon2D', 'f8', ('lon2D')) |
---|
| 875 | basicvardef(newvar, 'longitude', 'longitude map observations','degrees_East') |
---|
| 876 | newvar[:] = minlon + np.arange(Nd)*blon |
---|
| 877 | newattr = set_attribute(newvar, 'axis', 'X') |
---|
| 878 | |
---|
| 879 | newvar = onc.createVariable( 'lat2D', 'f8', ('lat2D')) |
---|
| 880 | basicvardef(newvar, 'latitude', 'latitude map observations', 'degrees_North') |
---|
| 881 | newvar[:] = minlat + np.arange(Nd)*blat |
---|
| 882 | newattr = set_attribute(newvar, 'axis', 'Y') |
---|
| 883 | |
---|
| 884 | if dbg: |
---|
| 885 | print ' ' + fname + ': minlon=',minlon,'maxlon=',maxlon |
---|
| 886 | print ' ' + fname + ': minlat=',minlat,'maxlat=',maxlat |
---|
| 887 | print ' ' + fname + ': precission on x-axis=', blon*(Nd-1), 'y-axis=', \ |
---|
| 888 | blat*(Nd-1) |
---|
| 889 | |
---|
| 890 | if okind == 'multi-points': |
---|
| 891 | map2D = np.ones((Nd, Nd), dtype=np.float)*fillValueI |
---|
| 892 | |
---|
| 893 | dimt = len(tvals) |
---|
| 894 | Nlost = 0 |
---|
| 895 | for it in range(dimt): |
---|
| 896 | lon = dvalues[dscn['NAMElon']][it] |
---|
| 897 | lat = dvalues[dscn['NAMElat']][it] |
---|
| 898 | if lon is not None and lat is not None: |
---|
| 899 | ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon)) |
---|
| 900 | ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat)) |
---|
| 901 | |
---|
| 902 | if map2D[ilat,ilon] == fillValueI: |
---|
| 903 | map2D[ilat,ilon] = 1 |
---|
| 904 | else: |
---|
| 905 | map2D[ilat,ilon] = map2D[ilat,ilon] + 1 |
---|
| 906 | if dbg: print it, lon, lat, ilon, ilat, map2D[ilat,ilon] |
---|
| 907 | |
---|
| 908 | newvar = onc.createVariable( 'mapobs', 'f4', ('lat2D', 'lon2D'), \ |
---|
| 909 | fill_value = fillValueI) |
---|
| 910 | basicvardef(newvar, 'map_observations', 'number of observations', '-') |
---|
| 911 | newvar[:] = map2D |
---|
| 912 | newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D') |
---|
| 913 | |
---|
| 914 | elif okind == 'trajectory': |
---|
| 915 | # A new 2D map with the trajectory 'NAMElon' and 'NAMElat' and maybe 'NAMEheight' |
---|
| 916 | # are necessary. A NdxNdxNd domain space size will be used. Using time as |
---|
| 917 | # reference variable |
---|
| 918 | if dscn['NAMEheight'] == '-': |
---|
| 919 | # No height |
---|
| 920 | map2D = np.ones((Nd, Nd), dtype=np.float)*fillValueI |
---|
| 921 | |
---|
| 922 | dimt = len(tvals) |
---|
| 923 | Nlost = 0 |
---|
| 924 | if dbg: print ' time-step lon lat ix iy passes _______' |
---|
| 925 | for it in range(dimt): |
---|
| 926 | lon = dvalues[dscn['NAMElon']][it] |
---|
| 927 | lat = dvalues[dscn['NAMElat']][it] |
---|
| 928 | if lon is not None and lat is not None: |
---|
| 929 | ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon)) |
---|
| 930 | ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat)) |
---|
| 931 | |
---|
| 932 | if map2D[ilat,ilon] == fillValueI: |
---|
| 933 | map2D[ilat,ilon] = 1 |
---|
| 934 | else: |
---|
| 935 | map2D[ilat,ilon] = map2D[ilat,ilon] + 1 |
---|
| 936 | if dbg: print it, lon, lat, ilon, ilat, map2D[ilat,ilon] |
---|
| 937 | |
---|
| 938 | newvar = onc.createVariable( 'trjobs', 'i', ('lat2D', 'lon2D'), \ |
---|
| 939 | fill_value = fillValueI) |
---|
| 940 | basicvardef(newvar, 'trajectory_observations', 'number of passes', '-' ) |
---|
| 941 | newvar[:] = map2D |
---|
| 942 | newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D') |
---|
| 943 | |
---|
| 944 | else: |
---|
| 945 | ivn = 0 |
---|
| 946 | for vn in dscn['varN']: |
---|
| 947 | if vn == dscn['NAMEheight']: |
---|
| 948 | zu = dscn['varU'][ivn] |
---|
| 949 | break |
---|
| 950 | ivn = ivn + 1 |
---|
| 951 | |
---|
| 952 | objfile.createDimension('z2D',Nd) |
---|
| 953 | zvals = ma.masked_equal(dvalues[dscn['NAMEheight']], None) |
---|
| 954 | minz = min(zvals) |
---|
| 955 | maxz = max(zvals) |
---|
| 956 | |
---|
| 957 | bz = (maxz - minz)/(Nd-1) |
---|
| 958 | |
---|
| 959 | newvar = onc.createVariable( 'z2D', 'f8', ('z2D')) |
---|
| 960 | basicvardef(newvar, 'z2D', 'z-coordinate map observations', zu) |
---|
| 961 | newvar[:] = minz + np.arange(Nd)*bz |
---|
| 962 | newattr = set_attribute(newvar, 'axis', 'Z') |
---|
| 963 | |
---|
| 964 | if dbg: |
---|
| 965 | print ' ' + fname + ': zmin=',minz,zu,'zmax=',maxz,zu |
---|
| 966 | print ' ' + fname + ': precission on z-axis=', bz*(Nd-1), zu |
---|
| 967 | |
---|
| 968 | map3D = np.ones((Nd, Nd, Nd), dtype=int)*fillValueI |
---|
| 969 | dimt = len(tvals) |
---|
| 970 | Nlost = 0 |
---|
| 971 | if dbg: print ' time-step lon lat z ix iy iz passes _______' |
---|
| 972 | for it in range(dimt): |
---|
| 973 | lon = dvalues[dscn['NAMElon']][it] |
---|
| 974 | lat = dvalues[dscn['NAMElat']][it] |
---|
| 975 | z = dvalues[dscn['NAMEheight']][it] |
---|
| 976 | if lon is not None and lat is not None and z is not None: |
---|
| 977 | ilon = int((Nd-1)*(lon - minlon)/(maxlon - minlon)) |
---|
| 978 | ilat = int((Nd-1)*(lat - minlat)/(maxlat - minlat)) |
---|
| 979 | iz = int((Nd-1)*(z - minz)/(maxz - minz)) |
---|
| 980 | |
---|
| 981 | if map3D[iz,ilat,ilon] == fillValueI: |
---|
| 982 | map3D[iz,ilat,ilon] = 1 |
---|
| 983 | else: |
---|
| 984 | map3D[iz,ilat,ilon] = map3D[iz,ilat,ilon] + 1 |
---|
| 985 | if dbg: print it, lon, lat, z, ilon, ilat, iz, map3D[iz,ilat,ilon] |
---|
| 986 | |
---|
| 987 | newvar = onc.createVariable( 'trjobs', 'i', ('z2D', 'lat2D', 'lon2D'), \ |
---|
| 988 | fill_value = fillValueI) |
---|
| 989 | basicvardef(newvar, 'trajectory_observations', 'number of passes', '-') |
---|
| 990 | newvar[:] = map3D |
---|
| 991 | newattr = set_attribute(newvar, 'coordinates', 'lon2D lat2D z2D') |
---|
| 992 | |
---|
| 993 | onc.sync() |
---|
| 994 | return |
---|
| 995 | |
---|
| 996 | def adding_station_desc(onc,stdesc): |
---|
| 997 | """ Function to add a station description in a netCDF file |
---|
| 998 | onc= netCDF object |
---|
[553] | 999 | stdesc= station description name, lon, lat, height |
---|
[259] | 1000 | """ |
---|
| 1001 | fname = 'adding_station_desc' |
---|
| 1002 | |
---|
| 1003 | newdim = onc.createDimension('nst',1) |
---|
| 1004 | |
---|
[553] | 1005 | newvar = objfile.createVariable( 'station', 'c', ('nst','StrLength')) |
---|
| 1006 | writing_str_nc(newvar, [stdesc[0].replace('!', ' ')], StringLength) |
---|
| 1007 | |
---|
| 1008 | newvar = objfile.createVariable( 'lonstGDM', 'c', ('nst','StrLength')) |
---|
| 1009 | Gv = int(stdesc[1]) |
---|
| 1010 | Dv = int((stdesc[1] - Gv)*60.) |
---|
| 1011 | Mv = int((stdesc[1] - Gv - Dv/60.)*3600.) |
---|
| 1012 | writing_str_nc(newvar, [str(Gv)+"d" + str(Dv)+"m" + str(Mv)+'s'], StringLength) |
---|
| 1013 | |
---|
[259] | 1014 | if onc.variables.has_key('lon'): |
---|
| 1015 | print warnmsg |
---|
| 1016 | print ' ' + fname + ": variable 'lon' already exist !!" |
---|
| 1017 | print " renaming it as 'lonst'" |
---|
| 1018 | lonname = 'lonst' |
---|
| 1019 | else: |
---|
| 1020 | lonname = 'lon' |
---|
| 1021 | |
---|
| 1022 | newvar = objfile.createVariable( lonname, 'f4', ('nst')) |
---|
| 1023 | basicvardef(newvar, lonname, 'longitude', 'degrees_West' ) |
---|
[553] | 1024 | newvar[:] = stdesc[1] |
---|
[259] | 1025 | |
---|
[553] | 1026 | newvar = objfile.createVariable( 'latstGDM', 'c', ('nst','StrLength')) |
---|
| 1027 | Gv = int(stdesc[2]) |
---|
| 1028 | Dv = int((stdesc[2] - Gv)*60.) |
---|
| 1029 | Mv = int((stdesc[2] - Gv - Dv/60.)*3600.) |
---|
| 1030 | writing_str_nc(newvar, [str(Gv)+"d" + str(Dv)+"m" + str(Mv)+'s'], StringLength) |
---|
| 1031 | |
---|
[259] | 1032 | if onc.variables.has_key('lat'): |
---|
| 1033 | print warnmsg |
---|
| 1034 | print ' ' + fname + ": variable 'lat' already exist !!" |
---|
| 1035 | print " renaming it as 'latst'" |
---|
| 1036 | latname = 'latst' |
---|
| 1037 | else: |
---|
| 1038 | latname = 'lat' |
---|
| 1039 | |
---|
| 1040 | newvar = objfile.createVariable( latname, 'f4', ('nst')) |
---|
| 1041 | basicvardef(newvar, lonname, 'latitude', 'degrees_North' ) |
---|
[553] | 1042 | newvar[:] = stdesc[2] |
---|
[259] | 1043 | |
---|
| 1044 | if onc.variables.has_key('height'): |
---|
| 1045 | print warnmsg |
---|
| 1046 | print ' ' + fname + ": variable 'height' already exist !!" |
---|
| 1047 | print " renaming it as 'heightst'" |
---|
| 1048 | heightname = 'heightst' |
---|
| 1049 | else: |
---|
| 1050 | heightname = 'height' |
---|
| 1051 | |
---|
| 1052 | newvar = objfile.createVariable( heightname, 'f4', ('nst')) |
---|
| 1053 | basicvardef(newvar, heightname, 'height above sea level', 'm' ) |
---|
[553] | 1054 | newvar[:] = stdesc[3] |
---|
[259] | 1055 | |
---|
| 1056 | return |
---|
| 1057 | |
---|
[553] | 1058 | def oper_values(dvals, opers): |
---|
| 1059 | """ Function to operate the values according to given parameters |
---|
| 1060 | dvals= datavalues |
---|
| 1061 | opers= operations |
---|
| 1062 | """ |
---|
| 1063 | fname = 'oper_values' |
---|
| 1064 | |
---|
| 1065 | newdvals = {} |
---|
| 1066 | varnames = dvals.keys() |
---|
| 1067 | |
---|
| 1068 | aopers = ['sumc','subc','mulc','divc'] |
---|
| 1069 | |
---|
| 1070 | Nopers = len(opers) |
---|
| 1071 | for iop in range(Nopers): |
---|
| 1072 | vn = varnames[iop] |
---|
| 1073 | print vn,'oper:',opers[iop] |
---|
| 1074 | if opers[iop] != '-': |
---|
| 1075 | opern = opers[iop].split(',')[0] |
---|
| 1076 | operv = np.float(opers[iop].split(',')[1]) |
---|
| 1077 | |
---|
| 1078 | vvals = np.array(dvals[vn]) |
---|
| 1079 | |
---|
| 1080 | if opern == 'sumc': |
---|
| 1081 | newvals = np.where(vvals is None, None, vvals+operv) |
---|
| 1082 | elif opern == 'subc': |
---|
| 1083 | newvals = np.where(vvals is None, None, vvals-operv) |
---|
| 1084 | elif opern == 'mulc': |
---|
| 1085 | newvals = np.where(vvals is None, None, vvals*operv) |
---|
| 1086 | elif opern == 'divc': |
---|
| 1087 | newvals = np.where(vvals is None, None, vvals/operv) |
---|
| 1088 | else: |
---|
| 1089 | print errormsg |
---|
| 1090 | print ' ' + fname + ": operation '" + opern + "' not ready!!" |
---|
| 1091 | print ' availables:',aopers |
---|
| 1092 | quit(-1) |
---|
| 1093 | |
---|
| 1094 | newdvals[vn] = list(newvals) |
---|
| 1095 | else: |
---|
| 1096 | newdvals[vn] = dvals[vn] |
---|
| 1097 | |
---|
| 1098 | return newdvals |
---|
| 1099 | |
---|
[1949] | 1100 | def WMOcodevar(unitsn, onc, Lstrcode=1024): |
---|
| 1101 | """ Function to add a variabe providing description of a units based on WMO code |
---|
| 1102 | unitsn= name of the units (all WMO codes derived units must be labelled as |
---|
[1950] | 1103 | 'wmo_code_[num]' associated to a file valled wmo_[num].code) |
---|
[1949] | 1104 | onc= netCDF object file to add the description |
---|
| 1105 | Lstrcode= length of description of codes |
---|
| 1106 | |
---|
| 1107 | wmo_[num].code must have the structure: ('#' for comments) |
---|
[1950] | 1108 | reference|web page, document reference with the code |
---|
| 1109 | short_description|main description of the code |
---|
| 1110 | long_description|long description of the code |
---|
| 1111 | codeTYPE|type of the code (andy of 'D', 'F', 'I', 'S') |
---|
| 1112 | @| Values line giving the start of the values |
---|
| 1113 | [val1]|[meaning of first value] |
---|
[1949] | 1114 | (...) |
---|
[1950] | 1115 | [valN]|[meaning of last value] |
---|
[1949] | 1116 | """ |
---|
| 1117 | fname = 'WMOcodevar' |
---|
| 1118 | |
---|
| 1119 | # From http://stackoverflow.com/questions/4934806/how-can-i-find-scripts-directory-with-python |
---|
| 1120 | folder = os.path.dirname(os.path.realpath(__file__)) |
---|
| 1121 | |
---|
[1950] | 1122 | code = unitsn.split('_')[2] |
---|
[1949] | 1123 | |
---|
| 1124 | infile = folder + '/wmo_' + code +'.code' |
---|
| 1125 | |
---|
| 1126 | if not os.path.isfile(infile): |
---|
| 1127 | print warnmsg |
---|
| 1128 | print ' ' + fname + ": WMO code file '" + infile + "' does not exist !!" |
---|
| 1129 | return |
---|
| 1130 | # main expected values |
---|
[1950] | 1131 | descvals = ['wmo_code', 'reference', 'short_description', 'long_description', \ |
---|
| 1132 | 'codeTYPE', '@'] |
---|
[1949] | 1133 | |
---|
| 1134 | availcodetype = ['D', 'F', 'I', 'S'] |
---|
| 1135 | |
---|
| 1136 | codevalues = {} |
---|
| 1137 | ocode = open(infile, 'r') |
---|
| 1138 | inivals = False |
---|
| 1139 | codvals = [] |
---|
| 1140 | codmeanings = [] |
---|
| 1141 | for line in ocode: |
---|
| 1142 | if len(line) > 1 and line[0:1] != '#': |
---|
| 1143 | linev = line.replace('\n','').replace('\t',' ').replace('\r','') |
---|
| 1144 | if not inivals: |
---|
[1950] | 1145 | Sv = linev.split('|')[0] |
---|
| 1146 | Vv = linev.split('|')[1] |
---|
[1949] | 1147 | if searchInlist(descvals, Sv): |
---|
| 1148 | if Sv != '@': codevalues[Sv] = Vv |
---|
| 1149 | else: inivals = True |
---|
| 1150 | else: |
---|
[1950] | 1151 | Svv = linev.split('|')[0] |
---|
| 1152 | Vvv = linev.split('|')[1] |
---|
[1949] | 1153 | codvals.append(Svv) |
---|
| 1154 | codmeanings.append(Vvv) |
---|
| 1155 | |
---|
| 1156 | # Creating variable |
---|
| 1157 | if not searchInlist(onc.dimensions, 'Lstringcode'): |
---|
| 1158 | print ' ' + fname + ": Adding string length dimension 'Lstringcode' for " + \ |
---|
| 1159 | " code descriptions" |
---|
| 1160 | newdim = onc.createDimension('Lstringcode', Lstrcode) |
---|
| 1161 | Ncodes = len(codvals) |
---|
| 1162 | codedimn = 'wmo_code_' + str(code) |
---|
| 1163 | if not searchInlist(onc.dimensions, codedimn): |
---|
| 1164 | print ' ' + fname + ": Adding '" + codedimn + "' dimension for code " + \ |
---|
| 1165 | "description" |
---|
| 1166 | newdim = onc.createDimension(codedimn, Ncodes) |
---|
| 1167 | onc.sync() |
---|
| 1168 | |
---|
| 1169 | # Variable with the value of the code |
---|
| 1170 | if not onc.variables.has_key(codedimn): |
---|
| 1171 | if codevalues['codeTYPE'] == 'D': |
---|
| 1172 | newvar = onc.createVariable(codedimn, 'f8', (codedimn)) |
---|
| 1173 | for iv in range(Ncodes): newvar[iv] = np.float64(codvals[iv]) |
---|
| 1174 | elif codevalues['codeTYPE'] == 'F': |
---|
| 1175 | newvar = onc.createVariable(codedimn, 'f', (codedimn)) |
---|
| 1176 | for iv in range(Ncodes): newvar[iv] = np.float(codvals[iv]) |
---|
| 1177 | elif codevalues['codeTYPE'] == 'I': |
---|
| 1178 | newvar = onc.createVariable(codedimn, 'i', (codedimn)) |
---|
[1951] | 1179 | for iv in range(Ncodes): newvar[iv] = int(codvals[iv]) |
---|
[1949] | 1180 | elif codevalues['codeTYPE'] == 'S': |
---|
| 1181 | newvar = onc.createVariable(codedimn, 'c', (codedimn, 'Lstringcode')) |
---|
| 1182 | writing_str_nc(newvar, codvals, Lstrcode) |
---|
| 1183 | else: |
---|
| 1184 | print errormsg |
---|
| 1185 | print ' ' + fname + ": codeTYPE '" + codevalues['codeTYPE'] + "' not" + \ |
---|
| 1186 | " ready !!" |
---|
| 1187 | print ' available ones:', availcodetype |
---|
| 1188 | quit(-1) |
---|
| 1189 | |
---|
| 1190 | for descv in descvals: |
---|
| 1191 | if descv != '@' and descv != 'codeTYPE': |
---|
| 1192 | newvar.setncattr(descv, codevalues[descv]) |
---|
| 1193 | # Variable with the meaning of the code |
---|
| 1194 | if not onc.variables.has_key(codedimn+'_meaning'): |
---|
[1950] | 1195 | print ' '+fname+": Adding '" + codedimn + "_meaning' variable for code " + \ |
---|
| 1196 | "description" |
---|
[1949] | 1197 | newvar = onc.createVariable(codedimn+'_meaning','c',(codedimn,'Lstringcode')) |
---|
| 1198 | writing_str_nc(newvar, codmeanings, Lstrcode) |
---|
| 1199 | newvar.setncattr('description', 'meaning of WMO code ' + str(code)) |
---|
| 1200 | |
---|
| 1201 | onc.sync() |
---|
| 1202 | |
---|
| 1203 | return |
---|
| 1204 | |
---|
[1952] | 1205 | def add_global_PyNCplot(ObjFile, pyscript, funcname, version): |
---|
| 1206 | """ Function to add global attributes from 'PyNCplot' to a given netCDF |
---|
| 1207 | ObjFile= netCDF file object to which add the global attributes |
---|
| 1208 | funcname= name of the function from which file comes from |
---|
| 1209 | version= version of the function |
---|
| 1210 | """ |
---|
| 1211 | fname = 'add_global_PyNCplot' |
---|
| 1212 | |
---|
| 1213 | # Global values |
---|
| 1214 | ObjFile.setncattr('author', 'L. Fita') |
---|
| 1215 | newattr = set_attributek(ObjFile, 'institution', unicode('Centro de ' + \ |
---|
| 1216 | 'Investigaciones del Mar y la Atm' + unichr(243) + 'sfera (CIMA)'), 'U') |
---|
| 1217 | newattr = set_attributek(ObjFile, 'institution2', unicode('Instituto Franco-' + \ |
---|
| 1218 | 'Argentino sobre Estudios de Clima y sus Impactos (CNRS, UMI-3351-IFAECI'), 'U') |
---|
| 1219 | newattr = set_attributek(ObjFile, 'center', unicode('Consejo Nacional de ' + \ |
---|
| 1220 | 'Investigaciones Cient' + unichr(237) + 'ficas y T' + unichr(233) + \ |
---|
| 1221 | 'cnicas (CONICET)'), 'U') |
---|
| 1222 | ObjFile.setncattr('university', 'Universidad de Buenos Aires (UBA)') |
---|
| 1223 | ObjFile.setncattr('city', 'Buenos Aires') |
---|
| 1224 | ObjFile.setncattr('country', 'Argentina') |
---|
| 1225 | ObjFile.setncattr('tool', 'PyNCplot') |
---|
| 1226 | ObjFile.setncattr('url', 'http://www.xn--llusfb-5va.cat/python/PyNCplot') |
---|
| 1227 | ObjFile.setncattr('script', pyscript) |
---|
| 1228 | if funcname is not None: |
---|
| 1229 | ObjFile.setncattr('function', funcname) |
---|
| 1230 | ObjFile.setncattr('version', version) |
---|
| 1231 | |
---|
| 1232 | ObjFile.sync() |
---|
| 1233 | |
---|
| 1234 | return |
---|
| 1235 | |
####### ###### ##### #### ### ## #

strCFt="Refdate,tunits (CF reference date [YYYY][MM][DD][HH][MI][SS] format and " + \
  "time units: 'weeks', 'days', 'hours', 'minutes', 'seconds')"

kindobs=['stations-map', 'multi-points', 'single-station', 'trajectory']
strkObs="kind of observations; 'stations-map': map of fixed measuring points, " +   \
  "'multi-points': multiple individual punctual obs " +                             \
  "(e.g., lightning strikes), 'single-station': single station on a fixed position,"+\
  " 'trajectory': following a trajectory"

parser = OptionParser()
parser.add_option("-c", "--comments", dest="charcom",
  help="':'-separated list of characters used for comments", metavar="VALUES")
parser.add_option("-d", "--descriptionfile", dest="fdesc",
  help="description file to use" + read_description.__doc__, metavar="FILE")
parser.add_option("-e", "--end_column", dest="endcol",
  help="character to indicate the end of a column ('space' for ' ', or 'None,[fixcoljumps]' for fixed-size columns, with [fixcoljumps] a ','-separated list of the positions of the column ends within the line)", metavar="VALUE")
parser.add_option("-f", "--file", dest="obsfile",
  help="observational file to use", metavar="FILE")
parser.add_option("-g", "--debug", dest="debug",
  help="whether debug is required ('false', 'true')", metavar="VALUE")
parser.add_option("-k", "--kindObs", dest="obskind", type='choice', choices=kindobs,
  help=strkObs, metavar="VALUE")
parser.add_option("-s", "--stationLocation", dest="stloc",
  help="name ('!' for spaces), longitude, latitude and height of the station (only for 'single-station')",
  metavar="FILE")
parser.add_option("-t", "--CFtime", dest="CFtime", help=strCFt, metavar="VALUE")

(opts, args) = parser.parse_args()

####### #######
## MAIN
 #######

ofile='OBSnetcdf.nc'

if opts.charcom is None:
    print warnmsg
    print ' ' + main + ': No list of comment characters provided!!'
    print ' assuming no need!'
    charcomments = []
else:
    charcomments = opts.charcom.split(':')

if opts.fdesc is None:
    print errormsg
    print ' ' + main + ': No description file for the observational data provided!!'
    quit(-1)

if opts.endcol is None:
    print warnmsg
    print ' ' + main + ': No end-of-column indicator provided!!'
    print " assuming 'space'"
    endcol = ' '
else:
    if opts.endcol == 'space':
        endcol = ' '
    else:
        endcol = opts.endcol

if opts.obsfile is None:
    print errormsg
    print ' ' + main + ': No observations file provided!!'
    quit(-1)

if opts.debug is None:
    print warnmsg
    print ' ' + main + ': No debug provided!!'
    print " assuming 'False'"
    debug = False
else:
    if opts.debug == 'true':
        debug = True
    else:
        debug = False

if not os.path.isfile(opts.fdesc):
    print errormsg
    print ' ' + main + ": description file '" + opts.fdesc + "' does not exist !!"
    quit(-1)

if not os.path.isfile(opts.obsfile):
    print errormsg
    print ' ' + main + ": observational file '" + opts.obsfile + "' does not exist !!"
    quit(-1)

if opts.CFtime is None:
    print warnmsg
    print ' ' + main + ': No CFtime criteria are provided !!'
    print " either time is already in CF-format ('timeFMT=CFtime') in '" +          \
      opts.fdesc + "'"
    print " or assuming refdate: '19491201000000' and time units: 'hours'"
    referencedate = '19491201000000'
    timeunits = 'hours'
else:
    referencedate = opts.CFtime.split(',')[0]
    timeunits = opts.CFtime.split(',')[1]
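# e.g.: '-t 19491201000000,seconds' gives referencedate='19491201000000' and
#   timeunits='seconds'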

if opts.obskind is None:
    print warnmsg
    print ' ' + main + ': No kind of observations provided !!'
    print " assuming 'single-station': single station on a fixed position at 0,0,0"
    obskind = 'single-station'
    stationdesc = [0.0, 0.0, 0.0, 0.0]
else:
    obskind = opts.obskind
    if obskind == 'single-station':
        if opts.stloc is None:
            print errormsg
            print ' ' + main + ': No station location provided !!'
            quit(-1)
        else:
            stvals = opts.stloc.split(',')
            stationdesc = [stvals[0], np.float(stvals[1]), np.float(stvals[2]),      \
              np.float(stvals[3])]
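            # e.g. (sketch, hypothetical values): '-s My!Station,-58.5,-34.6,25.'
            #   gives stationdesc = ['My!Station', -58.5, -34.6, 25.0]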
    else:
        obskind = opts.obskind

# Reading description file
##
description = read_description(opts.fdesc, debug)

Nvariables=len(description['varN'])
formats = description['varTYPE']

if len(formats) != Nvariables:
    print errormsg
    print ' ' + main + ': number of formats:',len(formats),' and number of ' +       \
      'variables', Nvariables,' do not coincide!!'
    print ' * what is found is _______'
    if Nvariables > len(formats):
        Nshow = len(formats)
        for ivar in range(Nshow):
            print ' ',description['varN'][ivar],':', description['varLN'][ivar],     \
              '[', description['varU'][ivar], '] fmt:', formats[ivar]
        print ' missing values for:', description['varN'][Nshow:Nvariables+1]
    else:
        Nshow = Nvariables
        for ivar in range(Nshow):
            print ' ',description['varN'][ivar],':', description['varLN'][ivar],     \
              '[', description['varU'][ivar], '] fmt:', formats[ivar]
        print ' excess of formats for:', formats[Nshow:len(formats)+1]

#    quit(-1)

# Reading values
##
datavalues = read_datavalues(opts.obsfile, charcomments, endcol, formats,
  description['varOPER'], description['MissingValue'], description['varN'], debug)
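# datavalues is a dictionary keyed by variable name (description['varN']), each entry
#   holding the list of values read from the corresponding column; missing values are
#   kept as None and replaced by fill values when writing the netCDF variables below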

# Total number of values
Ntvalues = len(datavalues[description['varN'][0]])
if obskind == 'stations-map':
    print main + ': total values found:',Ntvalues
else:
    print main + ': total temporal values found:',Ntvalues

objfile = NetCDFFile(ofile, 'w')

# Creation of dimensions
##
if obskind == 'stations-map':
    rowsdim = 'Npoints'
    dimlength = Ntvalues
else:
    rowsdim = 'time'
    dimlength = None

objfile.createDimension(rowsdim,dimlength)
objfile.createDimension('StrLength',StringLength)

# Creation of variables
##
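# Each variable is created according to its varTYPE code from the description file:
#   'D': double precision float ('f8'), 'F': single precision float, 'I': integer,
#   'S': string (stored as characters along the 'StrLength' dimension)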
for ivar in range(Nvariables):
    varn = description['varN'][ivar]
    print " including: '" + varn + "' ... .. ."

    if formats[ivar] == 'D':
        # 'f8' (double precision); 'f32' is not a valid netCDF4/numpy type code
        newvar = objfile.createVariable(varn, 'f8', (rowsdim), fill_value=fillValueF)
        basicvardef(newvar, varn, description['varLN'][ivar],                         \
          description['varU'][ivar])
        newvar[:] = np.where(datavalues[varn] is None, fillValueF, datavalues[varn])
    elif formats[ivar] == 'F':
        newvar = objfile.createVariable(varn, 'f', (rowsdim), fill_value=fillValueF)
        basicvardef(newvar, varn, description['varLN'][ivar],                         \
          description['varU'][ivar])
        newvar[:] = np.where(datavalues[varn] is None, fillValueF, datavalues[varn])
    elif formats[ivar] == 'I':
        newvar = objfile.createVariable(varn, 'i', (rowsdim), fill_value=fillValueI)
        basicvardef(newvar, varn, description['varLN'][ivar],                         \
          description['varU'][ivar])
        # Why is it not working with integers?
        vals = np.array(datavalues[varn])
        for iv in range(Ntvalues):
            if vals[iv] is None: vals[iv] = fillValueI
        newvar[:] = vals
    elif formats[ivar] == 'S':
        newvar = objfile.createVariable(varn, 'c', (rowsdim,'StrLength'))
        basicvardef(newvar, varn, description['varLN'][ivar],                         \
          description['varU'][ivar])
        vals = datavalues[varn]
        for iv in range(Ntvalues):
            if vals[iv] is None: vals[iv] = fillValueS
        writing_str_nc(newvar, vals, StringLength)
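        # writing_str_nc (defined earlier in this script) stores the string values
        #   along the 'StrLength' dimension (StringLength characters per value)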

    # Getting new variables to describe certain units as codeWMO_[num] from an
    #   external file
    if description['varU'][ivar][0:8] == 'wmo_code':
        WMOcodevar(description['varU'][ivar], objfile)

    # Extra variable descriptions/attributes
    if description.has_key('varBUFR'):
        set_attribute(newvar,'bufr_code',description['varBUFR'][ivar])

    objfile.sync()

# Time variable in CF format
##
if description['FMTtime'] == 'CFtime':
    timevals = datavalues[description['NAMEtime']]
    iv = 0
    for ivn in description['varN']:
        if ivn == description['NAMEtime']:
            tunits = description['varU'][iv]
            break
        iv = iv + 1
else:
    # Time as a composition of different columns
    tcomposite = description['NAMEtime'].find('@')
    if tcomposite != -1:
        timevars = description['NAMEtime'].split('@')

        print warnmsg
        print ' ' + main + ': time values as combination of different columns!'
        print ' combining:',timevars,' with a final format: ',description['FMTtime']

        timeSvals = []
        if debug: print ' ' + main + ': creating times _______'
        for it in range(Ntvalues):
            tSvals = ''
            for tvar in timevars:
                tSvals = tSvals + datavalues[tvar][it] + ' '

            timeSvals.append(tSvals[0:len(tSvals)-1])
            if debug: print it, '*' + timeSvals[it] + '*'

        timevals = Stringtimes_CF(timeSvals, description['FMTtime'], referencedate,   \
          timeunits, debug)
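        # e.g. (sketch, hypothetical columns): with NAMEtime='date@hour' and row values
        #   '20121018' and '1200', the combined string '20121018 1200' is converted to
        #   CF time values by Stringtimes_CF according to description['FMTtime']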
    else:
        timevals = Stringtimes_CF(datavalues[description['NAMEtime']],                \
          description['FMTtime'], referencedate, timeunits, debug)

    CFtimeRef = referencedate[0:4] +'-'+ referencedate[4:6] +'-'+ referencedate[6:8] + \
      ' ' + referencedate[8:10] +':'+ referencedate[10:12] +':'+ referencedate[12:14]
    tunits = timeunits + ' since ' + CFtimeRef
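    # e.g. with the defaults referencedate='19491201000000' and timeunits='hours',
    #   tunits becomes 'hours since 1949-12-01 00:00:00'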

if objfile.variables.has_key('time'):
    print warnmsg
    print ' ' + main + ": variable 'time' already exists !!"
    print " renaming it as 'CFtime'"
    timeCFname = 'CFtime'
    newdim = objfile.renameDimension('time','CFtime')
    newvar = objfile.createVariable( timeCFname, 'f8', ('CFtime'))
    basicvardef(newvar, timeCFname, 'time', tunits )
else:
    if not searchInlist(objfile.dimensions, 'time'):
        newdim = objfile.createDimension('time',None)
    timeCFname = 'time'
    newvar = objfile.createVariable( timeCFname, 'f8', ('time'))
    # len() works for both list and array time values (timevals may be either)
    newvar[:] = np.zeros(len(timevals))
    basicvardef(newvar, timeCFname, 'time', tunits )

set_attribute(newvar, 'calendar', 'standard')
if obskind == 'stations-map':
    newvar[:] = timevals[0]
else:
    newvar[:] = timevals

# Global attributes
##
for descn in description.keys():
    if descn[0:3] != 'var' and descn[0:4] != 'NAME' and descn[0:3] != 'FMT':
        set_attribute(objfile, descn, description[descn])

add_global_PyNCplot(objfile, main, 'main', version)

objfile.sync()

# Adding new variables as function of the observational type
## 'stations-map', 'multi-points', 'single-station', 'trajectory'

if obskind != 'single-station':
    adding_complementary(objfile, description, obskind, datavalues, timevals,         \
      referencedate, timeunits, Ndim2D, debug)
else:
    # Adding three variables with the station location: longitude, latitude and height
    adding_station_desc(objfile,stationdesc)

objfile.sync()
objfile.close()

print main + ": Successful generation of netcdf observational file '" + ofile + "' !!"