
import numpy as np
import netCDF4

## Author: AS
def errormess(text,printvar=None):
    print text
    if printvar is not None: print printvar
    exit()
    return

## Author: AS
def adjust_length (tab, zelen):
    if tab is None:
        outtab = np.ones(zelen) * -999999
    else:
        if zelen != len(tab):
            print "not enough or too many values... setting the same value for all variables"
            outtab = np.ones(zelen) * tab[0]
        else:
            outtab = tab
    return outtab

## Author: AS
def getname(var=False,var2=False,winds=False,anomaly=False):
    if var and winds: basename = var + '_UV'
    elif var: basename = var
    elif winds: basename = 'UV'
    else: errormess("please set at least winds or var")
    if anomaly: basename = 'd' + basename
    if var2: basename = basename + '_' + var2
    return basename

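## Usage sketch (illustrative values, not from the original file):
##   getname(var='tsurf',winds=True,anomaly=True)  returns 'dtsurf_UV'
##   getname(var='temp',var2='ps')                 returns 'temp_ps'
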
## Author: AS + AC
def localtime(time,lon,namefile): # lon is the mean longitude of the plot, not of the domain. central lon of domain is taken from cen_lon
    ## THIS IS FOR MESOSCALE
    nc = netCDF4.Dataset(namefile)
    ## get start date and intervals
    dt_hour=1. ; start=0.
    if hasattr(nc,'TITLE'):
        title=getattr(nc, 'TITLE')
        if hasattr(nc,'DT') and hasattr(nc,'START_DATE') and 'MRAMS' in title:
            ## we must adapt what is done in getlschar to MRAMS (outputs from ic.py)
            dt_hour=getattr(nc, 'DT')/60.
            start_date=getattr(nc, 'START_DATE')
            start_hour=np.float(start_date[11:13])
            start_minute=np.float(start_date[14:16])/60.
            start=start_hour+start_minute # start is the local time of the simu at longitude 0
            #LMD MMM is 1 output/hour (and not 1 output/timestep)
            #MRAMS is 1 output/timestep, unless stride is added in ic.py
        elif 'WRF' in title:
            [dummy,start,dt_hour] = getlschar ( namefile ) # get start hour and interval hour
    ## get longitude
    if lon is not None:
        if lon[0,1]!=lon[0,0]: mean_lon_plot = 0.5*(lon[0,1]-lon[0,0])
        else: mean_lon_plot=lon[0,0]
    elif hasattr(nc, 'CEN_LON'): mean_lon_plot=getattr(nc, 'CEN_LON')
    else: mean_lon_plot=0.
    ## calculate local time
    ltst = start + time*dt_hour + mean_lon_plot / 15.
    ltst = int (ltst * 10) / 10.
    ltst = ltst % 24
    return ltst

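## Worked example (a sketch with made-up numbers, not read from an actual file):
## with start=6.0 (06:00 LT at longitude 0), dt_hour=1., time index 10 and
## mean_lon_plot=45. deg E, ltst = 6. + 10*1. + 45./15. = 19.0, i.e. 19:00
## local true solar time (then truncated to one decimal and taken modulo 24).
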
## Author: AC
def check_localtime(time):
    a=-1
    for i in range(len(time)-1):
        if (time[i] > time[i+1]): a=i
    if a >= 0 and a < (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range(a+1):
            time[i]=time[i]-24.
    if a >= 0 and a >= (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range((len(time)-1) - a):
            time[a+1+i]=time[a+1+i]+24.
    return time

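## Example of the recasting (illustrative array, not from the original code):
## an hourly local-time axis crossing midnight early on, e.g. [22.,23.,0.,1.,2.],
## has its wrap at index a=1 < (len-1)/2., so the first a+1 values are shifted
## by -24h, giving the monotonic axis [-2.,-1.,0.,1.,2.] suitable for contourf.
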
## Author: AS, AC, JL
def whatkindfile (nc):
    typefile = 'gcm' # default
    if 'controle' in nc.variables: typefile = 'gcm'
    elif 'phisinit' in nc.variables: typefile = 'gcm'
    elif 'phis' in nc.variables: typefile = 'gcm'
    elif 'time_counter' in nc.variables: typefile = 'earthgcm'
    elif hasattr(nc,'START_DATE'): typefile = 'meso'
    elif 'HGT_M' in nc.variables: typefile = 'geo'
    elif hasattr(nc,'institution'):
        if "European Centre" in getattr(nc,'institution'): typefile = 'ecmwf'
    return typefile

## Author: AS
def getfieldred (nc,var,indexlon,indexlat,indexvert,indextime):
    dimension = len(nc.variables[var].dimensions)
    ## reading only the needed indices is much faster and uses much less memory, esp. with large datasets
    if dimension == 2: field = nc.variables[var][indextime,indexlon]
    elif dimension == 3: field = nc.variables[var][indextime,indexlat,indexlon]
    elif dimension == 4: field = nc.variables[var][indextime,indexvert,indexlat,indexlon]
    elif dimension == 1: field = nc.variables[var][indextime]
    return field

## Author: AS
def getfield (nc,var):
    dimension = len(nc.variables[var].dimensions)
    ## explicit slicing is much faster than simply referring to nc.variables[var]
    print " Opening variable",var," with", dimension, "dimensions ..."
    if dimension == 2: field = nc.variables[var][:,:]
    elif dimension == 3: field = nc.variables[var][:,:,:]
    elif dimension == 4: field = nc.variables[var][:,:,:,:]
    elif dimension == 1: field = nc.variables[var][:]
    # if there are NaNs in the ncdf, they should be loaded as a masked array which will be
    # recast as a regular array later in reducefield
    if (np.isnan(np.sum(field)) and (type(field).__name__ not in 'MaskedArray')):
        print "Warning: netcdf has nan values but is not loaded as a Masked Array."
        print "recasting array type"
        out=np.ma.masked_invalid(field)
        out.set_fill_value([np.NaN])
    else:
        # missing values from zrecast or hrecast are -1e-33
        masked=np.ma.masked_where(field < -1e30,field)
        masked2=np.ma.masked_where(field > 1e35,field)
        masked.set_fill_value([np.NaN]) ; masked2.set_fill_value([np.NaN])
        mask = np.ma.getmask(masked) ; mask2 = np.ma.getmask(masked2)
        if (True in np.array(mask)):
            out=masked
            print "Masked array... Missing value is NaN"
        elif (True in np.array(mask2)):
            out=masked2
            print "Masked array... Missing value is NaN"
        # else:
        #    # missing values from api are 1e36
        #    masked=np.ma.masked_where(field > 1e35,field)
        #    masked.set_fill_value([np.NaN])
        #    mask = np.ma.getmask(masked)
        #    if (True in np.array(mask)):out=masked
        #    else:out=field
        else:
            # missing values from MRAMS files are 0.100E+32
            masked=np.ma.masked_where(field > 1e30,field)
            masked.set_fill_value([np.NaN])
            mask = np.ma.getmask(masked)
            if (True in np.array(mask)):out=masked
            else:out=field
        # else:out=field
    return out

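## Usage sketch (hypothetical file and variable names, for illustration only):
#   nc = netCDF4.Dataset("wrfout_example.nc")
#   tsurf = getfield(nc,'TSURF')   # numpy array, possibly masked where values are missing
#   print tsurf.shape
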
## Author: AC
# Compute the norm of the winds or return a hodograph
# The corresponding variable to call is UV or uvmet (to use api)
def windamplitude (nc,mode):
    varinfile = nc.variables.keys()
    if "U" in varinfile: zu=getfield(nc,'U')
    elif "Um" in varinfile: zu=getfield(nc,'Um')
    else: errormess("you need U or Um in your file.")
    if "V" in varinfile: zv=getfield(nc,'V')
    elif "Vm" in varinfile: zv=getfield(nc,'Vm')
    else: errormess("you need V or Vm in your file.")
    znt,znz,zny,znx = np.array(zu).shape
    if hasattr(nc,'WEST-EAST_PATCH_END_UNSTAG'):znx=getattr(nc, 'WEST-EAST_PATCH_END_UNSTAG')
    zuint = np.zeros([znt,znz,zny,znx])
    zvint = np.zeros([znt,znz,zny,znx])
    if "U" in varinfile:
        if hasattr(nc,'SOUTH-NORTH_PATCH_END_STAG'): zny_stag=getattr(nc, 'SOUTH-NORTH_PATCH_END_STAG')
        if hasattr(nc,'WEST-EAST_PATCH_END_STAG'): znx_stag=getattr(nc, 'WEST-EAST_PATCH_END_STAG')
        if zny_stag == zny: zvint=zv
        else:
            for yy in np.arange(zny): zvint[:,:,yy,:] = (zv[:,:,yy,:] + zv[:,:,yy+1,:])/2.
        if znx_stag == znx: zuint=zu
        else:
            for xx in np.arange(znx): zuint[:,:,:,xx] = (zu[:,:,:,xx] + zu[:,:,:,xx+1])/2.
    else:
        zuint=zu
        zvint=zv
    if mode=='amplitude': return np.sqrt(zuint**2 + zvint**2)
    if mode=='hodograph': return zuint,zvint
    if mode=='hodograph_2': return None, 360.*np.arctan(zvint/zuint)/(2.*np.pi)

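## Usage sketch (assumes a mesoscale file containing U and V; names are illustrative):
#   nc = netCDF4.Dataset("wrfout_example.nc")
#   wind = windamplitude(nc,'amplitude')   # 4D wind speed, sqrt(u**2+v**2)
#   u,v  = windamplitude(nc,'hodograph')   # components destaggered to the mass grid
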
## Author: AC
# Compute the enrichment factor of non-condensible gases
# The corresponding variable to call is enfact
# The enrichment factor is computed as in Yuan Lian et al. 2012,
# i.e. you need to have the VL2 site at Ls 135 in your data.
# This only requires co2col, so you can build concat.nc at low cost.
def enrichment_factor(nc,lon,lat,time):
    from myplot import reducefield
    varinfile = nc.variables.keys()
    if "co2col" in varinfile: co2col=getfield(nc,'co2col')
    else: print "error, you need co2col var in your file"
    if "ps" in varinfile: ps=getfield(nc,'ps')
    else: print "error, you need ps var in your file"
    dimension = len(nc.variables['co2col'].dimensions)
    if dimension == 2:
        zny,znx = np.array(co2col).shape
        znt=1
    elif dimension == 3: znt,zny,znx = np.array(co2col).shape
    mmrarcol = np.zeros([znt,zny,znx])
    enfact = np.zeros([znt,zny,znx])
    grav=3.72
    mmrarcol[:,:,:] = 1. - grav*co2col[:,:,:]/ps[:,:,:]
    # Computation with reference argon mmr at VL2 Ls 135 (as in Yuan Lian et al 2012)
    lonvl2=np.zeros([1,2])
    latvl2=np.zeros([1,2])
    timevl2=np.zeros([1,2])
    lonvl2[0,0]=-180
    lonvl2[0,1]=180
    latvl2[:,:]=48.16
    timevl2[:,:]=135.
    indexlon = getsindex(lonvl2,0,lon)
    indexlat = getsindex(latvl2,0,lat)
    indextime = getsindex(timevl2,0,time)
    mmrvl2, error = reducefield( mmrarcol, d4=indextime, d1=indexlon, d2=indexlat)
    print "VL2 Ls 135 mmr arcol:", mmrvl2
    enfact[:,:,:] = mmrarcol[:,:,:]/mmrvl2
    return enfact

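## Formula recap (a sketch of what the function above computes):
## the column mass mixing ratio of non-condensible gases is
##   mmrarcol = 1 - grav*co2col/ps        (grav = 3.72 m s-2 on Mars)
## and the enrichment factor is mmrarcol normalized by its value at the
## Viking Lander 2 site (48.16N) at Ls=135, following Yuan Lian et al. (2012).
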
## Author: AC
# Compute the norm of the slope angles
# The corresponding variable to call is SLOPEXY
def slopeamplitude (nc):
    varinfile = nc.variables.keys()
    if "slopex" in varinfile: zu=getfield(nc,'slopex')
    elif "SLOPEX" in varinfile: zu=getfield(nc,'SLOPEX')
    else: errormess("you need slopex or SLOPEX in your file.")
    if "slopey" in varinfile: zv=getfield(nc,'slopey')
    elif "SLOPEY" in varinfile: zv=getfield(nc,'SLOPEY')
    else: errormess("you need slopey or SLOPEY in your file.")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zvint = np.zeros([znt,zny,znx])
    zuint=zu
    zvint=zv
    return np.sqrt(zuint**2 + zvint**2)

## Author: AC
# Compute the temperature difference between surface and first level.
# API is automatically called to get TSURF and TK.
# The corresponding variable to call is DELTAT
def deltat0t1 (nc):
    varinfile = nc.variables.keys()
    if "tsurf" in varinfile: zu=getfield(nc,'tsurf')
    elif "TSURF" in varinfile: zu=getfield(nc,'TSURF')
    else: errormess("You need tsurf or TSURF in your file")
    if "tk" in varinfile: zv=getfield(nc,'tk')
    elif "TK" in varinfile: zv=getfield(nc,'TK')
    else: errormess("You need tk or TK in your file. (might need to use API. try to add -i 4 -l XXX)")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zuint=zu - zv[:,0,:,:]
    return zuint

## Author: AS + TN + AC
def reducefield (input,d4=None,d3=None,d2=None,d1=None,yint=False,alt=None,anomaly=False,redope=None,mesharea=None,unidim=999):
    ### we do it the reverse way to be compliant with netcdf "t z y x" or "t y x" or "y x"
    ### it would actually be better to name d4 d3 d2 d1 as t z y x
    ### ... note, anomaly is only computed over d1 and d2 for the moment
    from mymath import max,mean,min,sum,getmask
    csmooth = 12 ## a fair amount of grid points (too high results in high computation time)
    if redope is not None:
        # if redope == "mint": input = min(input,axis=0) ; d1 = None
        # elif redope == "maxt": input = max(input,axis=0) ; d1 = None
        if redope == "edge_y1": input = input[:,:,0,:] ; d2 = None
        elif redope == "edge_y2": input = input[:,:,-1,:] ; d2 = None
        elif redope == "edge_x1": input = input[:,:,:,0] ; d1 = None
        elif redope == "edge_x2": input = input[:,:,:,-1] ; d1 = None
        # else: errormess("not supported. but try lines in reducefield beforehand.")
    #elif redope == "minz": input = min(input,axis=1) ; d2 = None
    #elif redope == "maxz": input = max(input,axis=1) ; d2 = None
    #elif redope == "miny": input = min(input,axis=2) ; d3 = None
    #elif redope == "maxy": input = max(input,axis=2) ; d3 = None
    #elif redope == "minx": input = min(input,axis=3) ; d4 = None
    #elif redope == "maxx": input = max(input,axis=3) ; d4 = None
    dimension = np.array(input).ndim
    shape = np.array(np.array(input).shape)
    #print 'd1,d2,d3,d4: ',d1,d2,d3,d4
    if anomaly: print 'ANOMALY ANOMALY'
    output = input
    error = False
    #### this is needed to cope with the case where d4,d3,d2,d1 are single integers and not arrays
    if d4 is not None and not isinstance(d4, np.ndarray): d4=[d4]
    if d3 is not None and not isinstance(d3, np.ndarray): d3=[d3]
    if d2 is not None and not isinstance(d2, np.ndarray): d2=[d2]
    if d1 is not None and not isinstance(d1, np.ndarray): d1=[d1]
    ### now the main part
    if dimension == 2:
        #### this is needed for 1d-type files (where dim=2 but axes are time-vert and not lat-lon)
        if unidim==1: d2=d4 ; d1=d3 ; d4 = None ; d3 = None
        if mesharea is None: mesharea=np.ones(shape)
        if max(d2) >= shape[0]: error = True
        elif max(d1) >= shape[1]: error = True
        elif d1 is not None and d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d1 is not None: output = mean(input[:,d1],axis=1)
        elif d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
    elif dimension == 3:
        if mesharea is None: mesharea=np.ones(shape[[1,2]])
        if max(d4) >= shape[0]: error = True
        elif max(d2) >= shape[1]: error = True
        elif max(d1) >= shape[2]: error = True
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d1 is not None: output = mean(input[d4,:,:],axis=0); output=mean(output[:,d1],axis=1)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1);totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d1 is not None: output = mean(input[:,:,d1],axis=2)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None: output = mean(input[d4,:,:],axis=0)
    elif dimension == 4:
        if mesharea is None: mesharea=np.ones(shape[[2,3]]) # mesharea=np.random.random_sample(shape[[2,3]])*5. + 2. # for testing
        if max(d4) >= shape[0]: error = True
        elif max(d3) >= shape[1]: error = True
        elif max(d2) >= shape[2]: error = True
        elif max(d1) >= shape[3]: error = True
        elif d4 is not None and d3 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0); totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d3 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d3 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            output = mean(output[:,d1],axis=1)
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
            #noperturb = smooth1d(output,window_len=7)
            #lenlen = len(output) ; output = output[1:lenlen-7] ; yeye = noperturb[4:lenlen-4]
            #plot(output) ; plot(yeye) ; show() ; plot(output-yeye) ; show()
        elif d3 is not None and d2 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d4 is not None and d3 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = mean(output[:,:,d1],axis=2)
        elif d3 is not None and d2 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d3 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            output = mean(output[:,:,d1],axis=2)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2); totalarea = mean(totalarea[:,:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2); output = mean(output[:,:,d1],axis=2)/totalarea
        elif d1 is not None: output = mean(input[:,:,:,d1],axis=3)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,output.shape[3]))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2)/totalarea
        elif d3 is not None: output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
        elif d4 is not None: output = mean(input[d4,:,:,:],axis=0)

    if redope is not None:
        if redope == "mint": output = min(output,axis=0)
        elif redope == "maxt": output = max(output,axis=0)

    dimension2 = np.array(output).ndim
    shape2 = np.array(output).shape
    print 'REDUCEFIELD dim,shape: ',dimension,shape,' >>> ',dimension2,shape2
    return output, error

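## Usage sketch (illustrative index arrays; assumes a 3D variable ordered t,y,x):
#   field = getfield(nc,'tsurf')
#   prof, error = reducefield(field, d4=np.arange(10), d1=np.arange(64))
#   # averages over the first 10 time indices and 64 longitude indices,
#   # leaving a latitude profile; d3 would select altitude for 4D variables.
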
## Author: AC + AS
def reduce_zaxis (input,ax=None,yint=False,vert=None,indice=None):
    from mymath import max,mean
    from scipy import integrate
    if yint and vert is not None and indice is not None:
        if type(input).__name__=='MaskedArray':
            input.set_fill_value([np.NaN])
            output = integrate.trapz(input.filled(),x=vert[indice],axis=ax)
        else:
            output = integrate.trapz(input,x=vert[indice],axis=ax)
    else:
        output = mean(input,axis=ax)
    return output

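## Note (sketch of the behaviour coded above): with yint=True and a vertical
## coordinate vert, the selected levels are integrated with the trapezoidal rule,
##   integral f dz  ~  sum_k 0.5*(f[k]+f[k+1])*(vert[k+1]-vert[k])
## otherwise they are simply averaged with mymath.mean.
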
## Author: AS + TN
def definesubplot ( numplot, fig, ipreferline=False):
    from matplotlib.pyplot import rcParams
    rcParams['font.size'] = 12. ## default (important for multiple calls)
    if numplot <= 0:
        subv = 99999
        subh = 99999
    elif numplot == 1:
        subv = 1
        subh = 1
    elif numplot == 2:
        subv = 1 #2
        subh = 2 #1
        fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 3. / 4. )
    elif numplot == 3:
        subv = 3
        subh = 1
        fig.subplots_adjust(hspace = 0.5)
        if ipreferline: subv = 1 ; subh = 3 ; fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot == 4:
        subv = 2
        subh = 2
        #fig.subplots_adjust(wspace = 0.4, hspace = 0.6)
        fig.subplots_adjust(wspace = 0.4, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 2. / 3. )
    elif numplot <= 6:
        subv = 3 #2
        subh = 2 #3
        ##fig.subplots_adjust(wspace = 0.4, hspace = 0.0)
        #fig.subplots_adjust(wspace = 0.5, hspace = 0.3)
        fig.subplots_adjust(wspace = 0.3, hspace = 0.5)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 8:
        subv = 2
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 9:
        subv = 3
        subh = 3
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 12:
        subv = 3
        subh = 4
        fig.subplots_adjust(wspace = 0, hspace = 0.1)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 16:
        subv = 4
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    else:
        print "number of plots supported: 1 to 16"
        exit()
    return subv,subh

## Author: AS
def getstralt(nc,nvert):
    varinfile = nc.variables.keys()
    if 'vert' not in varinfile:
        stralt = "_lvl" + str(nvert)
    else:
        zelevel = int(nc.variables['vert'][nvert])
        if abs(zelevel) < 10000.: strheight=str(zelevel)+"m"
        else: strheight=str(int(zelevel/1000.))+"km"
        if 'altitude' in nc.dimensions: stralt = "_"+strheight+"-AMR"
        elif 'altitude_abg' in nc.dimensions: stralt = "_"+strheight+"-ALS"
        elif 'bottom_top' in nc.dimensions: stralt = "_"+strheight
        elif 'pressure' in nc.dimensions: stralt = "_"+str(zelevel)+"Pa"
        else: stralt = ""
    return stralt

## Author: AS
def getlschar ( namefile, getaxis=False ):
    from timestuff import sol2ls
    from string import rstrip
    import os as daos
    namefiletest = rstrip( rstrip( rstrip( namefile, chars="_z"), chars="_zabg"), chars="_p")
    testexist = daos.path.isfile(namefiletest)
    zetime = None
    if testexist:
        namefile = namefiletest
        #### we assume that wrfout is next to wrfout_z and wrfout_zabg
        nc = netCDF4.Dataset(namefile)
        zetime = None
        days_in_month = [61, 66, 66, 65, 60, 54, 50, 46, 47, 47, 51, 56]
        plus_in_month = [ 0, 61,127,193,258,318,372,422,468,515,562,613]
        if 'Times' in nc.variables:
            zetime = nc.variables['Times'][0]
            shape = np.array(nc.variables['Times']).shape
            if shape[0] < 2: zetime = None
    if zetime is not None \
       and 'vert' not in nc.variables:
        ##### strangely enough this does not work for api or ncrcat results!
        zesol = plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
        dals = int(sol2ls ( zesol )) #int( 10. * sol2ls ( zesol ) ) / 10.
        ###
        zetime2 = nc.variables['Times'][1]
        one = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
        next = int(zetime2[11]+zetime2[12]) + int(zetime2[14]+zetime2[15])/37.
        zehour = one
        zehourin = abs ( next - one )
        if not getaxis:
            lschar = "_Ls"+str(dals)
        else:
            zelen = len(nc.variables['Times'][:])
            yeye = range(zelen) ; lsaxis = range(zelen) ; solaxis = range(zelen) ; ltaxis = range(zelen)
            for iii in yeye:
                zetime = nc.variables['Times'][iii]
                ltaxis[iii] = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
                solaxis[iii] = ltaxis[iii] / 24. + plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
                lsaxis[iii] = sol2ls ( solaxis[iii] )
                if ltaxis[iii] < ltaxis[iii-1]: ltaxis[iii] = ltaxis[iii] + 24.
                #print ltaxis[iii], solaxis[iii], lsaxis[iii], getattr( nc, 'JULDAY' )
            lschar = lsaxis ; zehour = solaxis ; zehourin = ltaxis
    else:
        lschar=""
        zehour = 0
        zehourin = 1
    return lschar, zehour, zehourin

## Author: AS
def getprefix (nc):
    prefix = 'LMD_MMM_'
    prefix = prefix + 'd'+str(getattr(nc,'GRID_ID'))+'_'
    prefix = prefix + str(int(getattr(nc,'DX')/1000.))+'km_'
    return prefix

## Author: AS
def getproj (nc):
    typefile = whatkindfile(nc)
    if typefile in ['meso','geo']:
        ### (should CEN_LON be passed to the projection?)
        map_proj = getattr(nc, 'MAP_PROJ')
        cen_lat = getattr(nc, 'CEN_LAT')
        if map_proj == 2:
            if cen_lat > 10.:
                proj="npstere"
                #print "NP stereographic polar domain"
            else:
                proj="spstere"
                #print "SP stereographic polar domain"
        elif map_proj == 1:
            #print "lambert projection domain"
            proj="lcc"
        elif map_proj == 3:
            #print "mercator projection"
            proj="merc"
        else:
            proj="merc"
    elif typefile in ['gcm']: proj="cyl" ## problems with the other projections (drawing behind the sphere?)
    else: proj="ortho"
    return proj

## Author: AS
def ptitle (name):
    from matplotlib.pyplot import title
    title(name)
    print name

## Author: AS
def polarinterv (lon2d,lat2d):
    wlon = [np.min(lon2d),np.max(lon2d)]
    ind = np.array(lat2d).shape[0] / 2 ## to get a good boundlat and to get the pole
    wlat = [np.min(lat2d[ind,:]),np.max(lat2d[ind,:])]
    return [wlon,wlat]

## Author: AS
def simplinterv (lon2d,lat2d):
    return [[np.min(lon2d),np.max(lon2d)],[np.min(lat2d),np.max(lat2d)]]

## Author: AS
def wrfinterv (lon2d,lat2d):
    nx = len(lon2d[0,:])-1
    ny = len(lon2d[:,0])-1
    lon1 = lon2d[0,0]
    lon2 = lon2d[nx,ny]
    lat1 = lat2d[0,0]
    lat2 = lat2d[nx,ny]
    if abs(0.5*(lat1+lat2)) > 60.: wider = 0.5 * (abs(lon1)+abs(lon2)) * 0.1
    else: wider = 0.
    if lon1 < lon2: wlon = [lon1, lon2 + wider]
    else: wlon = [lon2, lon1 + wider]
    if lat1 < lat2: wlat = [lat1, lat2]
    else: wlat = [lat2, lat1]
    return [wlon,wlat]

## Author: AS
def makeplotres (filename,res=None,pad_inches_value=0.25,folder='',disp=True,ext='png',erase=False):
    import matplotlib.pyplot as plt
    from os import system
    addstr = ""
    if res is not None:
        res = int(res)
        addstr = "_"+str(res)
    name = filename+addstr+"."+ext
    if folder != '': name = folder+'/'+name
    plt.savefig(name,dpi=res,bbox_inches='tight',pad_inches=pad_inches_value)
    if disp: display(name)
    if ext in ['eps','ps','svg']: system("tar czvf "+name+".tar.gz "+name+" ; rm -f "+name)
    if erase: system("mv "+name+" to_be_erased")
    return

## Author: AS + AC
def dumpbdy (field,n,stag=None,condition=False,onlyx=False,onlyy=False):
    nx = len(field[0,:])-1
    ny = len(field[:,0])-1
    if condition:
        if stag == 'U': nx = nx-1
        if stag == 'V': ny = ny-1
        if stag == 'W': nx = nx+1 #special LES case when we dump stag on W
    if onlyx: result = field[:,n:nx-n]
    elif onlyy: result = field[n:ny-n,:]
    else: result = field[n:ny-n,n:nx-n]
    return result

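## Example (illustrative): dumpbdy(field,5) drops 5 points on each side of a 2D
## (y,x) array, typically to hide the relaxation zone at the boundaries of a
## mesoscale domain before plotting.
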
## Author: AS + AC
def getcoorddef ( nc ):
    ## getcoord2d for predefined types
    typefile = whatkindfile(nc)
    if typefile in ['meso']:
        if '9999' not in getattr(nc,'START_DATE') :
            ## regular mesoscale
            [lon2d,lat2d] = getcoord2d(nc)
        else:
            ## idealized mesoscale
            nx=getattr(nc,'WEST-EAST_GRID_DIMENSION')
            ny=getattr(nc,'SOUTH-NORTH_GRID_DIMENSION')
            dlat=getattr(nc,'DX')
            ## this is dirty because it is Martian-specific
            # ... but this is just intended to get "lat-lon" like info
            falselon = np.arange(-dlat*(nx-1)/2.,dlat*(nx-1)/2.,dlat)/60000.
            falselat = np.arange(-dlat*(ny-1)/2.,dlat*(ny-1)/2.,dlat)/60000.
            [lon2d,lat2d] = np.meshgrid(falselon,falselat) ## dummy coordinates
            print "WARNING: domain plot artificially centered on lat,lon 0,0"
    elif typefile in ['gcm','earthgcm','ecmwf']:
        #### shouldn't this be nc.variables?
        if "longitude" in nc.dimensions: dalon = "longitude"
        elif "lon" in nc.dimensions: dalon = "lon"
        else: dalon = "nothing"
        if "latitude" in nc.dimensions: dalat = "latitude"
        elif "lat" in nc.dimensions: dalat = "lat"
        else: dalat = "nothing"
        [lon2d,lat2d] = getcoord2d(nc,nlat=dalat,nlon=dalon,is1d=True)
    elif typefile in ['geo']:
        [lon2d,lat2d] = getcoord2d(nc,nlat='XLAT_M',nlon='XLONG_M')
    return lon2d,lat2d

## Author: AS
def getcoord2d (nc,nlat='XLAT',nlon='XLONG',is1d=False):
    if nlon == "nothing" or nlat == "nothing":
        print "NO LAT LON FIELDS. I AM TRYING MY BEST. I ASSUME GLOBAL FIELD."
        lon = np.linspace(-180.,180.,getdimfromvar(nc)[-1])
        lat = np.linspace(-90.,90.,getdimfromvar(nc)[-2])
        [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:
        if is1d:
            lat = nc.variables[nlat][:]
            lon = nc.variables[nlon][:]
            [lon2d,lat2d] = np.meshgrid(lon,lat)
        else:
            lat = nc.variables[nlat][0,:,:]
            lon = nc.variables[nlon][0,:,:]
            [lon2d,lat2d] = [lon,lat]
    return lon2d,lat2d

## Author: AS
def getdimfromvar (nc):
    varinfile = nc.variables.keys()
    sav = [0.]
    for var in varinfile:
        dim = nc.variables[var].shape ## usually the last variable is 4D or 3D
        if len(dim) > len(sav): sav=dim
    return sav

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def smooth1d(x,window_len=11,window='hanning'):
    """smooth the data using a window with requested size.
    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
    input:
        x: the input signal
        window_len: the dimension of the smoothing window; should be an odd integer
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
            flat window will produce a moving average smoothing.
    output:
        the smoothed signal
    example:
        t=linspace(-2,2,0.1)
        x=sin(t)+randn(len(t))*0.1
        y=smooth(x)
    see also:
        numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
        scipy.signal.lfilter
    TODO: the window parameter could be the window itself if an array instead of a string
    """
    x = np.array(x)
    if x.ndim != 1:
        raise ValueError, "smooth only accepts 1 dimension arrays."
    if x.size < window_len:
        raise ValueError, "Input vector needs to be bigger than window size."
    if window_len<3:
        return x
    if window not in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError, "Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
    s=np.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
    #print(len(s))
    if window == 'flat': #moving average
        w=np.ones(window_len,'d')
    else:
        w=getattr(np,window)(window_len)
    y=np.convolve(w/w.sum(),s,mode='valid')
    return y

## Author: AS
def smooth (field, coeff):
    ## actually blur_image could work with different coeff on x and y
    if coeff > 1: result = blur_image(field,int(coeff))
    else: result = field
    return result

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def gauss_kern(size, sizey=None):
    # Returns a normalized 2D gauss kernel array for convolutions
    size = int(size)
    if not sizey:
        sizey = size
    else:
        sizey = int(sizey)
    x, y = np.mgrid[-size:size+1, -sizey:sizey+1]
    g = np.exp(-(x**2/float(size)+y**2/float(sizey)))
    return g / g.sum()

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def blur_image(im, n, ny=None) :
    from scipy.signal import convolve
    # blurs the image by convolving with a gaussian kernel of typical size n.
    # The optional keyword argument ny allows for a different size in the y direction.
    g = gauss_kern(n, sizey=ny)
    improc = convolve(im, g, mode='same')
    return improc

## Author: AS
def getwinddef (nc):
    ###
    varinfile = nc.variables.keys()
    if 'Um' in varinfile: [uchar,vchar] = ['Um','Vm'] #; print "this is API meso file"
    elif 'U' in varinfile: [uchar,vchar] = ['U','V'] #; print "this is RAW meso file"
    elif 'u' in varinfile: [uchar,vchar] = ['u','v'] #; print "this is GCM file"
    elif 'vitu' in varinfile: [uchar,vchar] = ['vitu','vitv'] #; print "this is GCM v5 file"
    elif 'ucomp' in varinfile: [uchar,vchar] = ['ucomp','vcomp']
    ### you can add choices here !
    else: [uchar,vchar] = ['not found','not found']
    ###
    if uchar in ['U']: metwind = False ## geometrical (wrt grid)
    else: metwind = True ## meteorological (zon/mer)
    if metwind is False: print "Not using meteorological winds. You trust the numerical grid as being (x,y)"
    ###
    return uchar,vchar,metwind

## Author: AS
def vectorfield (u, v, x, y, stride=3, scale=15., factor=250., color='black', csmooth=1, key=True):
    ## scale sets the reference vector
    ## factor sets all the lengths (including the reference). INCREASE it to shorten the vectors.
    import matplotlib.pyplot as plt
    #posx = np.min(x) - np.std(x) / 10.
    #posy = np.min(y) - np.std(y) / 10.
    #posx = np.min(x)
    #posy = np.min(y) - 4.*np.std(y) / 10.
    u = smooth(u,csmooth)
    v = smooth(v,csmooth)
    widthvec = 0.004 #0.003 #0.005 #0.003
    q = plt.quiver( x[::stride,::stride],\
                    y[::stride,::stride],\
                    u[::stride,::stride],\
                    v[::stride,::stride],\
                    angles='xy',color=color,pivot='middle',\
                    scale=factor,width=widthvec )
    if color in ['white','yellow']: kcolor='black'
    else: kcolor=color
    if key: p = plt.quiverkey(q,-0.06,0.98,scale,\
                 str(int(scale)),color=kcolor,labelpos='S',labelsep = 0.03)
    return

## Author: AS
def display (name):
    from os import system
    system("display "+name+" > /dev/null 2> /dev/null &")
    return name

## Author: AS
def findstep (wlon):
    steplon = int((wlon[1]-wlon[0])/4.) #3
    step = 120.
    while step > steplon and step > 15. : step = step / 2.
    if step <= 15.:
        while step > steplon and step > 5. : step = step - 5.
        if step <= 5.:
            while step > steplon and step > 1. : step = step - 1.
            if step <= 1.:
                step = 1.
    return step

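## Worked example (illustrative): for a global domain wlon=[-180.,180.],
## steplon=int(360/4.)=90 and the loop halves step from 120. until it drops
## below steplon, so findstep returns 60.; define_proj then draws parallels
## every step degrees and meridians every step*2.
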
## Author: AS
def define_proj (char,wlon,wlat,back=None,blat=None,blon=None):
    from mpl_toolkits.basemap import Basemap
    import matplotlib as mpl
    from mymath import max
    meanlon = 0.5*(wlon[0]+wlon[1])
    meanlat = 0.5*(wlat[0]+wlat[1])
    zewidth = np.abs(wlon[0]-wlon[1])*60000.*np.cos(3.14*meanlat/180.)
    zeheight = np.abs(wlat[0]-wlat[1])*60000.
    if blat is None:
        ortholat=meanlat
        if wlat[0] >= 80.: blat = -40.
        elif wlat[1] <= -80.: blat = -40.
        elif wlat[1] >= 0.: blat = wlat[0]
        elif wlat[0] <= 0.: blat = wlat[1]
    else: ortholat=blat
    if blon is None: ortholon=meanlon
    else: ortholon=blon
    h = 50. ## in km
    radius = 3397200.
    if char == "cyl": m = Basemap(rsphere=radius,projection='cyl',\
            llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "moll": m = Basemap(rsphere=radius,projection='moll',lon_0=meanlon)
    elif char == "ortho": m = Basemap(rsphere=radius,projection='ortho',lon_0=ortholon,lat_0=ortholat)
    elif char == "lcc": m = Basemap(rsphere=radius,projection='lcc',lat_1=meanlat,lat_0=meanlat,lon_0=meanlon,\
            width=zewidth,height=zeheight)
            #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "npstere": m = Basemap(rsphere=radius,projection='npstere', boundinglat=blat, lon_0=0.)
    elif char == "spstere": m = Basemap(rsphere=radius,projection='spstere', boundinglat=blat, lon_0=180.)
    elif char == "nplaea": m = Basemap(rsphere=radius,projection='nplaea', boundinglat=wlat[0], lon_0=meanlon)
    elif char == "laea": m = Basemap(rsphere=radius,projection='laea',lon_0=meanlon,lat_0=meanlat,lat_ts=meanlat,\
            width=zewidth,height=zeheight)
            #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "nsper": m = Basemap(rsphere=radius,projection='nsper',lon_0=meanlon,lat_0=meanlat,satellite_height=h*1000.)
    elif char == "merc": m = Basemap(rsphere=radius,projection='merc',lat_ts=0.,\
            llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "geos": m = Basemap(rsphere=radius,projection='geos',lon_0=meanlon)
    elif char == "robin": m = Basemap(rsphere=radius,projection='robin',lon_0=0)
    elif char == "cass":
        if zewidth > 60000.: ## approx. more than one degree
            m = Basemap(rsphere=radius,projection='cass',\
                lon_0=meanlon,lat_0=meanlat,\
                width=zewidth,height=zeheight)
        else:
            m = Basemap(rsphere=radius,projection='cass',\
                lon_0=meanlon,lat_0=meanlat,\
                llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    else: errormess("projection not supported.")
    fontsizemer = int(mpl.rcParams['font.size']*3./4.)
    if zewidth > 60000. or char in ["npstere","spstere","ortho"]:
        if char in ["cyl","lcc","merc","nsper","laea"]: step = findstep(wlon)
        else: step = 10.
        steplon = step*2.
    else:
        print "very small domain !"
        steplon = 0.5
        step = 0.5
    zecolor = 'grey'
    zelinewidth = 1
    zelatmax = 80.
    if meanlat > 75.: zelatmax = 90. ; step = step/2. ; steplon = steplon*2.
    # to show the GCM grid:
    #zecolor = 'r'
    #zelinewidth = 1
    #step = 180./48.
    #steplon = 360./64.
    #zelatmax = 90. - step/3
    if char not in ["moll","robin"]:
        if wlon[1]-wlon[0] < 2.: ## LOCAL MODE
            m.drawmeridians(np.r_[-1.:1.:0.05], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
            m.drawparallels(np.r_[-1.:1.:0.05], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
        else: ## GLOBAL OR REGIONAL MODE
            m.drawmeridians(np.r_[-360.:360.:steplon], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
            m.drawparallels(np.r_[-180.:180.:step], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
    if back:
        if back not in ["coast","sea"]: m.warpimage(marsmap(back),scale=0.75)
        elif back == "coast": m.drawcoastlines()
        elif back == "sea": m.drawlsmask(land_color='white',ocean_color='aqua')
    #if not back:
    #    if not var: back = "mola" ## if no var: draw mola
    #    elif typefile in ['mesoapi','meso','geo'] \
    #       and proj not in ['merc','lcc','nsper','laea']: back = "molabw" ## if var but meso: draw molabw
    #    else: pass ## else: draw None
    return m

## Author: AS
#### temporary test
def putpoints (map,plot):
    #### from http://www.scipy.org/Cookbook/Matplotlib/Maps
    # lat/lon coordinates of the points to plot
    lats = [18.4]
    lons = [-134.0]
    points=['Olympus Mons']
    # compute the native map projection coordinates for the points.
    x,y = map(lons,lats)
    # plot filled circles at the locations of the points.
    map.plot(x,y,'bo')
    # plot the names of those points.
    wherept = 0 #1000 #50000
    for name,xpt,ypt in zip(points,x,y):
        plot.text(xpt+wherept,ypt+wherept,name)
    ## the name does not show up...
    return

## Author: AS
def calculate_bounds(field,vmin=None,vmax=None):
    from mymath import max,min,mean
    ind = np.where(field < 9e+35)
    fieldcalc = field[ ind ] # the compact syntax does not work if field is a tuple
    ###
    dev = np.std(fieldcalc)*3.0
    ###
    if vmin is None: zevmin = mean(fieldcalc) - dev
    else: zevmin = vmin
    ###
    if vmax is None: zevmax = mean(fieldcalc) + dev
    else: zevmax = vmax
    if vmin == vmax:
        zevmin = mean(fieldcalc) - dev ### for continuity
        zevmax = mean(fieldcalc) + dev ### for continuity
    ###
    if zevmin < 0. and min(fieldcalc) > 0.: zevmin = 0.
    print "BOUNDS field ", min(fieldcalc), max(fieldcalc), " //// adopted", zevmin, zevmax
    return zevmin, zevmax

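## Note (recap of the rule coded above): when vmin/vmax are not prescribed, the
## colorbar bounds default to mean(field) +/- 3*std(field), computed over values
## below 9e+35 (i.e. excluding missing values), and the lower bound is clipped
## to 0 when the field is strictly positive.
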
[345] | 994 | ## Author: AS |
---|
[233] | 995 | def bounds(what_I_plot,zevmin,zevmax): |
---|
[247] | 996 | from mymath import max,min,mean |
---|
[233] | 997 | ### might be convenient to add the missing value in arguments |
---|
[310] | 998 | #what_I_plot[ what_I_plot < zevmin ] = zevmin#*(1. + 1.e-7) |
---|
| 999 | if zevmin < 0: what_I_plot[ what_I_plot < zevmin*(1. - 1.e-7) ] = zevmin*(1. - 1.e-7) |
---|
| 1000 | else: what_I_plot[ what_I_plot < zevmin*(1. + 1.e-7) ] = zevmin*(1. + 1.e-7) |
---|
[451] | 1001 | #print "NEW MIN ", min(what_I_plot) |
---|
[233] | 1002 | what_I_plot[ what_I_plot > 9e+35 ] = -9e+35 |
---|
[587] | 1003 | what_I_plot[ what_I_plot > zevmax ] = zevmax*(1. - 1.e-7) |
---|
[451] | 1004 | #print "NEW MAX ", max(what_I_plot) |
---|
[233] | 1005 | return what_I_plot |
---|
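
## Illustrative sketch (not from the original toolkit): how calculate_bounds and bounds
## are typically chained before contouring. The function name is hypothetical; missing
## values are assumed to be flagged with values above 9e+35, as elsewhere in this file.
def example_bound_field (field2d, vmin=None, vmax=None):
    zevmin, zevmax = calculate_bounds(field2d, vmin=vmin, vmax=vmax)   # mean +/- 3 sigma unless vmin/vmax are given
    clipped = bounds(np.array(field2d, copy=True), zevmin, zevmax)     # clamp just inside [zevmin,zevmax] (on a copy, since bounds modifies its argument in place)
    return clipped, zevmin, zevmax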
| 1006 | |
---|
[345] | 1007 | ## Author: AS |
---|
[241] | 1008 | def nolow(what_I_plot): |
---|
| 1009 | from mymath import max,min |
---|
| 1010 | lim = 0.15*0.5*(abs(max(what_I_plot))+abs(min(what_I_plot))) |
---|
[392] | 1011 | print "NO PLOT BELOW VALUE ", lim |
---|
[241] | 1012 | what_I_plot [ abs(what_I_plot) < lim ] = 1.e40 |
---|
| 1013 | return what_I_plot |
---|
| 1014 | |
---|
[418] | 1015 | |
---|
| 1016 | ## Author : AC |
---|
| 1017 | def hole_bounds(what_I_plot,zevmin,zevmax): |
---|
| 1018 | zi=0 |
---|
| 1019 | for i in what_I_plot: |
---|
| 1020 | zj=0 |
---|
| 1021 | for j in i: |
---|
| 1022 | if ((j < zevmin) or (j > zevmax)):what_I_plot[zi,zj]=np.NaN |
---|
| 1023 | zj=zj+1 |
---|
| 1024 | zi=zi+1 |
---|
| 1025 | |
---|
| 1026 | return what_I_plot |
---|
| 1027 | |
---|
[345] | 1028 | ## Author: AS |
---|
[233] | 1029 | def zoomset (wlon,wlat,zoom): |
---|
| 1030 | dlon = abs(wlon[1]-wlon[0])/2. |
---|
| 1031 | dlat = abs(wlat[1]-wlat[0])/2. |
---|
| 1032 | [wlon,wlat] = [ [wlon[0]+zoom*dlon/100.,wlon[1]-zoom*dlon/100.],\ |
---|
| 1033 | [wlat[0]+zoom*dlat/100.,wlat[1]-zoom*dlat/100.] ] |
---|
[392] | 1034 | print "ZOOM %",zoom,wlon,wlat |
---|
[233] | 1035 | return wlon,wlat |
---|
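
## Illustrative worked example (hypothetical values): each bound moves inward by
## zoom percent of the half-width of its interval, e.g. a 20% zoom on the whole planet:
#   wlon, wlat = zoomset([-180.,180.],[-90.,90.],20.)   # --> [-144.,144.] and [-72.,72.]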
| 1036 | |
---|
[345] | 1037 | ## Author: AS |
---|
[201] | 1038 | def fmtvar (whichvar="def"): |
---|
[204] | 1039 | fmtvar = { \ |
---|
[502] | 1040 | "MIXED": "%.0f",\ |
---|
| 1041 | "UPDRAFT": "%.0f",\ |
---|
| 1042 | "DOWNDRAFT": "%.0f",\ |
---|
[405] | 1043 | "TK": "%.0f",\ |
---|
[637] | 1044 | "T": "%.0f",\ |
---|
[822] | 1045 | "MARS_TI": "%.0f",\ |
---|
| 1046 | "THERMAL_INERTIA": "%.0f",\ |
---|
[516] | 1047 | #"ZMAX_TH": "%.0f",\ |
---|
| 1048 | #"WSTAR": "%.0f",\ |
---|
[425] | 1049 | # Variables from TES ncdf format |
---|
[363] | 1050 | "T_NADIR_DAY": "%.0f",\ |
---|
[376] | 1051 | "T_NADIR_NIT": "%.0f",\ |
---|
[425] | 1052 | # Variables from tes.py ncdf format |
---|
[398] | 1053 | "TEMP_DAY": "%.0f",\ |
---|
| 1054 | "TEMP_NIGHT": "%.0f",\ |
---|
[425] | 1055 | # Variables from MCS and mcs.py ncdf format |
---|
[427] | 1056 | "DTEMP": "%.0f",\ |
---|
| 1057 | "NTEMP": "%.0f",\ |
---|
| 1058 | "DNUMBINTEMP": "%.0f",\ |
---|
| 1059 | "NNUMBINTEMP": "%.0f",\ |
---|
[425] | 1060 | # other stuff |
---|
[405] | 1061 | "TPOT": "%.0f",\ |
---|
[295] | 1062 | "TSURF": "%.0f",\ |
---|
[817] | 1063 | "TSK": "%.0f",\ |
---|
[612] | 1064 | "U_OUT1": "%.0f",\ |
---|
| 1065 | "T_OUT1": "%.0f",\ |
---|
[204] | 1066 | "def": "%.1e",\ |
---|
| 1067 | "PTOT": "%.0f",\ |
---|
[760] | 1068 | "PSFC": "%.1f",\ |
---|
[204] | 1069 | "HGT": "%.1e",\ |
---|
| 1070 | "USTM": "%.2f",\ |
---|
[225] | 1071 | "HFX": "%.0f",\ |
---|
[232] | 1072 | "ICETOT": "%.1e",\ |
---|
[237] | 1073 | "TAU_ICE": "%.2f",\ |
---|
[451] | 1074 | "TAUICE": "%.2f",\ |
---|
[252] | 1075 | "VMR_ICE": "%.1e",\ |
---|
[345] | 1076 | "MTOT": "%.1f",\ |
---|
[405] | 1077 | "ANOMALY": "%.1f",\ |
---|
[771] | 1078 | "W": "%.2f",\ |
---|
[287] | 1079 | "WMAX_TH": "%.1f",\ |
---|
[562] | 1080 | "WSTAR": "%.1f",\ |
---|
[287] | 1081 | "QSURFICE": "%.0f",\ |
---|
[405] | 1082 | "UM": "%.0f",\ |
---|
[490] | 1083 | "WIND": "%.0f",\ |
---|
[612] | 1084 | "UVMET": "%.0f",\ |
---|
| 1085 | "UV": "%.0f",\ |
---|
[295] | 1086 | "ALBBARE": "%.2f",\ |
---|
[317] | 1087 | "TAU": "%.1f",\ |
---|
[382] | 1088 | "CO2": "%.2f",\ |
---|
[701] | 1089 | "ENFACT": "%.1f",\ |
---|
[771] | 1090 | "QDUST": "%.6f",\ |
---|
[345] | 1091 | #### T.N. |
---|
| 1092 | "TEMP": "%.0f",\ |
---|
| 1093 | "VMR_H2OICE": "%.0f",\ |
---|
| 1094 | "VMR_H2OVAP": "%.0f",\ |
---|
| 1095 | "TAUTES": "%.2f",\ |
---|
| 1096 | "TAUTESAP": "%.2f",\ |
---|
| 1097 | |
---|
[204] | 1098 | } |
---|
[518] | 1099 | if "TSURF" in whichvar: whichvar = "TSURF" |
---|
[204] | 1100 | if whichvar not in fmtvar: |
---|
| 1101 | whichvar = "def" |
---|
| 1102 | return fmtvar[whichvar] |
---|
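
## Illustrative usage sketch: fmtvar returns a printf-style format for colorbar labels.
## Any name containing TSURF maps to the TSURF entry; unknown names fall back to "def".
#   fmtvar("TSURF")      # --> "%.0f"
#   fmtvar("whatever")   # --> "%.1e"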
[201] | 1103 | |
---|
[345] | 1104 | ## Author: AS |
---|
[233] | 1105 | #################################################################################################################### |
---|
| 1106 | ### Colorbars http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps?action=AttachFile&do=get&target=colormaps3.png |
---|
[202] | 1107 | def defcolorb (whichone="def"): |
---|
[204] | 1108 | whichcolorb = { \ |
---|
| 1109 | "def": "spectral",\ |
---|
| 1110 | "HGT": "spectral",\ |
---|
[426] | 1111 | "HGT_M": "spectral",\ |
---|
[405] | 1112 | "TK": "gist_heat",\ |
---|
[425] | 1113 | "TPOT": "Paired",\ |
---|
[295] | 1114 | "TSURF": "RdBu_r",\ |
---|
[817] | 1115 | "TSK": "RdBu_r",\ |
---|
[204] | 1116 | "QH2O": "PuBu",\ |
---|
[871] | 1117 | "PSFC": "RdYlBu_r",\ |
---|
[204] | 1118 | "USTM": "YlOrRd",\ |
---|
[490] | 1119 | "WIND": "YlOrRd",\ |
---|
[363] | 1120 | #"T_nadir_nit": "RdBu_r",\ |
---|
| 1121 | #"T_nadir_day": "RdBu_r",\ |
---|
[225] | 1122 | "HFX": "RdYlBu",\ |
---|
[310] | 1123 | "ICETOT": "YlGnBu_r",\ |
---|
[345] | 1124 | #"MTOT": "PuBu",\ |
---|
| 1125 | "CCNQ": "YlOrBr",\ |
---|
| 1126 | "CCNN": "YlOrBr",\ |
---|
| 1127 | "TEMP": "Jet",\ |
---|
[238] | 1128 | "TAU_ICE": "Blues",\ |
---|
[451] | 1129 | "TAUICE": "Blues",\ |
---|
[252] | 1130 | "VMR_ICE": "Blues",\ |
---|
[241] | 1131 | "W": "jet",\ |
---|
[287] | 1132 | "WMAX_TH": "spectral",\ |
---|
[874] | 1133 | "ANOMALY": "gist_ncar",\ |
---|
| 1134 | #"ANOMALY": "RdBu_r",\ |
---|
[287] | 1135 | "QSURFICE": "hot_r",\ |
---|
[295] | 1136 | "ALBBARE": "spectral",\ |
---|
[317] | 1137 | "TAU": "YlOrBr_r",\ |
---|
[382] | 1138 | "CO2": "YlOrBr_r",\ |
---|
[753] | 1139 | "MIXED": "GnBu",\ |
---|
[345] | 1140 | #### T.N. |
---|
[647] | 1141 | "MTOT": "spectral",\ |
---|
[345] | 1142 | "H2O_ICE_S": "RdBu",\ |
---|
| 1143 | "VMR_H2OICE": "PuBu",\ |
---|
| 1144 | "VMR_H2OVAP": "PuBu",\ |
---|
[453] | 1145 | "WATERCAPTAG": "Blues",\ |
---|
[204] | 1146 | } |
---|
[241] | 1147 | #W --> spectral or jet |
---|
[240] | 1148 | #spectral BrBG RdBu_r |
---|
[392] | 1149 | #print "predefined colorbars" |
---|
[518] | 1150 | if "TSURF" in whichone: whichone = "TSURF" |
---|
[204] | 1151 | if whichone not in whichcolorb: |
---|
| 1152 | whichone = "def" |
---|
| 1153 | return whichcolorb[whichone] |
---|
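
## Illustrative usage sketch: defcolorb maps a variable name to a matplotlib colormap
## name, meant to be fed to get_cmap as done in maplatlon further below.
#   from matplotlib.cm import get_cmap
#   palette = get_cmap( name=defcolorb("TK") )   # "TK" --> "gist_heat"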
[202] | 1154 | |
---|
[345] | 1155 | ## Author: AS |
---|
[202] | 1156 | def definecolorvec (whichone="def"): |
---|
| 1157 | whichcolor = { \ |
---|
| 1158 | "def": "black",\ |
---|
| 1159 | "vis": "yellow",\ |
---|
[864] | 1160 | "vishires": "blue",\ |
---|
[202] | 1161 | "molabw": "yellow",\ |
---|
| 1162 | "mola": "black",\ |
---|
| 1163 | "gist_heat": "white",\ |
---|
[865] | 1164 | "hot": "white",\ |
---|
[202] | 1165 | "gist_rainbow": "black",\ |
---|
| 1166 | "spectral": "black",\ |
---|
| 1167 | "gray": "red",\ |
---|
| 1168 | "PuBu": "black",\ |
---|
[721] | 1169 | "titan": "red",\ |
---|
[202] | 1170 | } |
---|
| 1171 | if whichone not in whichcolor: |
---|
| 1172 | whichone = "def" |
---|
| 1173 | return whichcolor[whichone] |
---|
| 1174 | |
---|
[345] | 1175 | ## Author: AS |
---|
[180] | 1176 | def marsmap (whichone="vishires"): |
---|
[233] | 1177 | from os import uname |
---|
| 1178 | mymachine = uname()[1] |
---|
| 1179 | ### not sure about speed-up with this method... looks the same |
---|
[511] | 1180 | if "lmd.jussieu.fr" in mymachine: domain = "/u/aslmd/WWW/maps/" |
---|
| 1181 | elif "aymeric-laptop" in mymachine: domain = "/home/aymeric/Dropbox/Public/" |
---|
| 1182 | else: domain = "http://www.lmd.jussieu.fr/~aslmd/maps/" |
---|
[180] | 1183 | whichlink = { \ |
---|
[233] | 1184 | #"vis": "http://maps.jpl.nasa.gov/pix/mar0kuu2.jpg",\ |
---|
| 1185 | #"vishires": "http://www.lmd.jussieu.fr/~aslmd/maps/MarsMap_2500x1250.jpg",\ |
---|
| 1186 | #"geolocal": "http://dl.dropbox.com/u/11078310/geolocal.jpg",\ |
---|
| 1187 | #"mola": "http://www.lns.cornell.edu/~seb/celestia/mars-mola-2k.jpg",\ |
---|
| 1188 | #"molabw": "http://dl.dropbox.com/u/11078310/MarsElevation_2500x1250.jpg",\ |
---|
[453] | 1189 | "thermalday": domain+"thermalday.jpg",\ |
---|
| 1190 | "thermalnight": domain+"thermalnight.jpg",\ |
---|
| 1191 | "tesalbedo": domain+"tesalbedo.jpg",\ |
---|
[233] | 1192 | "vis": domain+"mar0kuu2.jpg",\ |
---|
| 1193 | "vishires": domain+"MarsMap_2500x1250.jpg",\ |
---|
| 1194 | "geolocal": domain+"geolocal.jpg",\ |
---|
| 1195 | "mola": domain+"mars-mola-2k.jpg",\ |
---|
| 1196 | "molabw": domain+"MarsElevation_2500x1250.jpg",\ |
---|
[238] | 1197 | "clouds": "http://www.johnstonsarchive.net/spaceart/marswcloudmap.jpg",\ |
---|
| 1198 | "jupiter": "http://www.mmedia.is/~bjj/data/jupiter_css/jupiter_css.jpg",\ |
---|
| 1199 | "jupiter_voy": "http://www.mmedia.is/~bjj/data/jupiter/jupiter_vgr2.jpg",\ |
---|
[558] | 1200 | #"bw": domain+"EarthElevation_2500x1250.jpg",\ |
---|
[273] | 1201 | "bw": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg",\ |
---|
| 1202 | "contrast": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg",\ |
---|
| 1203 | "nice": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg",\ |
---|
| 1204 | "blue": "http://eoimages.gsfc.nasa.gov/ve/2430/land_ocean_ice_2048.jpg",\ |
---|
[296] | 1205 | "blueclouds": "http://eoimages.gsfc.nasa.gov/ve/2431/land_ocean_ice_cloud_2048.jpg",\ |
---|
| 1206 | "justclouds": "http://eoimages.gsfc.nasa.gov/ve/2432/cloud_combined_2048.jpg",\ |
---|
[721] | 1207 | "pluto": "http://www.boulder.swri.edu/~buie/pluto/pluto_all.png",\ |
---|
| 1208 | "triton": "http://laps.noaa.gov/albers/sos/neptune/triton/triton_rgb_cyl_www.jpg",\ |
---|
| 1209 | "titan": "http://laps.noaa.gov/albers/sos/saturn/titan/titan_rgb_cyl_www.jpg",\ |
---|
| 1210 | #"titan": "http://laps.noaa.gov/albers/sos/celestia/titan_50.jpg",\ |
---|
| 1211 | "titanuni": "http://maps.jpl.nasa.gov/pix/sat6fss1.jpg",\ |
---|
| 1212 | "venus": "http://laps.noaa.gov/albers/sos/venus/venus4/venus4_rgb_cyl_www.jpg",\ |
---|
| 1213 | "cosmic": "http://laps.noaa.gov/albers/sos/universe/wmap/wmap_rgb_cyl_www.jpg",\ |
---|
[180] | 1214 | } |
---|
[238] | 1215 | ### see http://www.mmedia.is/~bjj/planetary_maps.html |
---|
[180] | 1216 | if whichone not in whichlink: |
---|
| 1217 | print "marsmap: choice not defined... you'll get the default one... " |
---|
| 1218 | whichone = "vishires" |
---|
| 1219 | return whichlink[whichone] |
---|
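
## Illustrative sketch (hypothetical function name, assuming m is an existing Basemap
## instance): marsmap returns a local path or URL to a background image that basemap
## can warp onto the map, as done in define_proj with m.warpimage(marsmap(back),scale=0.75).
def example_background (m, back="vishires"):
    m.warpimage( marsmap(back), scale=0.75 )
    return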
| 1220 | |
---|
[273] | 1221 | #def earthmap (whichone): |
---|
| 1222 | # if whichone == "contrast": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg" |
---|
| 1223 | # elif whichone == "bw": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg" |
---|
| 1224 | # elif whichone == "nice": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg" |
---|
| 1225 | # return whichlink |
---|
[180] | 1226 | |
---|
[345] | 1227 | ## Author: AS |
---|
[241] | 1228 | def latinterv (area="Whole"): |
---|
| 1229 | list = { \ |
---|
| 1230 | "Europe": [[ 20., 80.],[- 50., 50.]],\ |
---|
| 1231 | "Central_America": [[-10., 40.],[ 230., 300.]],\ |
---|
| 1232 | "Africa": [[-20., 50.],[- 50., 50.]],\ |
---|
[273] | 1233 | "Whole": [[-90., 90.],[-180., 180.]],\ |
---|
| 1234 | "Southern_Hemisphere": [[-90., 60.],[-180., 180.]],\ |
---|
| 1235 | "Northern_Hemisphere": [[-60., 90.],[-180., 180.]],\ |
---|
[871] | 1236 | "Tharsis_alt": [[-30., 60.],[ 190., 350.]],\ |
---|
[241] | 1237 | "Tharsis": [[-30., 60.],[-170.,- 10.]],\ |
---|
| 1238 | "Whole_No_High": [[-60., 60.],[-180., 180.]],\ |
---|
| 1239 | "Chryse": [[-60., 60.],[- 60., 60.]],\ |
---|
| 1240 | "North_Pole": [[ 50., 90.],[-180., 180.]],\ |
---|
| 1241 | "Close_North_Pole": [[ 75., 90.],[-180., 180.]],\ |
---|
| 1242 | "Far_South_Pole": [[-90.,-40.],[-180., 180.]],\ |
---|
| 1243 | "South_Pole": [[-90.,-50.],[-180., 180.]],\ |
---|
| 1244 | "Close_South_Pole": [[-90.,-75.],[-180., 180.]],\ |
---|
[637] | 1245 | "Sirenum_Crater_large": [[-46.,-34.],[-166.,-151.]],\ |
---|
| 1246 | "Sirenum_Crater_small": [[-36.,-26.],[-168.,-156.]],\ |
---|
| 1247 | "Rupes": [[ 72., 90.],[-120.,- 20.]],\ |
---|
[865] | 1248 | "Rupes2": [[ 80., 85.],[- 90.,- 60.]],\ |
---|
[721] | 1249 | "Xanadu": [[-40., 20.],[ 40., 120.]],\ |
---|
[781] | 1250 | "Hyperboreae": [[ 80., 87.],[- 70.,- 10.]],\ |
---|
[871] | 1251 | "VM_alt": [[-15., 0.],[ 270., 300.]],\ |
---|
[241] | 1252 | } |
---|
| 1253 | if area not in list: area = "Whole" |
---|
| 1254 | [olat,olon] = list[area] |
---|
| 1255 | return olon,olat |
---|
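
## Illustrative usage sketch: latinterv returns the [lonmin,lonmax],[latmin,latmax]
## window of a named area; unknown names fall back to "Whole".
#   wlon, wlat = latinterv("South_Pole")   # --> [-180.,180.] and [-90.,-50.]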
| 1256 | |
---|
[345] | 1257 | ## Author: TN |
---|
| 1258 | def separatenames (name): |
---|
| 1259 | # look for commas in the input name to separate the different names (files, variables, etc.) |
---|
| 1260 | if name is None: |
---|
| 1261 | names = None |
---|
| 1262 | else: |
---|
| 1263 | names = [] |
---|
| 1264 | stop = 0 |
---|
| 1265 | currentname = name |
---|
| 1266 | while stop == 0: |
---|
| 1267 | indexvir = currentname.find(',') |
---|
| 1268 | if indexvir == -1: |
---|
| 1269 | stop = 1 |
---|
| 1270 | name1 = currentname |
---|
| 1271 | else: |
---|
| 1272 | name1 = currentname[0:indexvir] |
---|
[876] | 1273 | names = np.concatenate((names,[name1])) |
---|
[345] | 1274 | currentname = currentname[indexvir+1:len(currentname)] |
---|
| 1275 | return names |
---|
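
## Illustrative usage sketch: separatenames splits a comma-separated string into an
## array of names (and returns None when given None), much like name.split(',').
#   separatenames("u,v,tk")   # --> array equivalent of ['u','v','tk']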
| 1276 | |
---|
| 1277 | |
---|
| 1278 | ## Author: TN |
---|
| 1279 | def readslices(saxis): |
---|
| 1280 | if saxis is None: |
---|
| 1281 | zesaxis = None |
---|
| 1282 | else: |
---|
[876] | 1283 | zesaxis = np.empty((len(saxis),2)) |
---|
[345] | 1284 | for i in range(len(saxis)): |
---|
| 1285 | a = separatenames(saxis[i]) |
---|
| 1286 | if len(a) == 1: |
---|
| 1287 | zesaxis[i,:] = float(a[0]) |
---|
| 1288 | else: |
---|
| 1289 | zesaxis[i,0] = float(a[0]) |
---|
| 1290 | zesaxis[i,1] = float(a[1]) |
---|
| 1291 | |
---|
| 1292 | return zesaxis |
---|
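
## Illustrative usage sketch: readslices turns a list of strings into an (n,2) array of
## slice bounds; a single value is duplicated into [value,value].
#   readslices(["10","20,30"])   # --> [[10.,10.],[20.,30.]]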
| 1293 | |
---|
[568] | 1294 | ## Author: TN |
---|
| 1295 | def readdata(data,datatype,coord1,coord2): |
---|
| 1296 | ## Read sparse data |
---|
[572] | 1297 | if datatype == 'txt': |
---|
[568] | 1298 | if len(data[coord1].shape) == 1: |
---|
| 1299 | return data[coord1][:] |
---|
| 1300 | elif len(data[coord1].shape) == 2: |
---|
| 1301 | return data[coord1][:,int(coord2)-1] |
---|
| 1302 | else: |
---|
| 1303 | errormess('error in readdata') |
---|
[572] | 1304 | elif datatype == 'sav': |
---|
[568] | 1305 | return data[coord1][coord2] |
---|
| 1306 | else: |
---|
| 1307 | errormess(datatype+' type is not supported!') |
---|
| 1308 | |
---|
| 1309 | |
---|
[399] | 1310 | ## Author: AS |
---|
[475] | 1311 | def bidimfind(lon2d,lat2d,vlon,vlat,file=None): |
---|
| 1312 | import matplotlib.pyplot as mpl |
---|
[399] | 1313 | if vlat is None: array = (lon2d - vlon)**2 |
---|
| 1314 | elif vlon is None: array = (lat2d - vlat)**2 |
---|
| 1315 | else: array = (lon2d - vlon)**2 + (lat2d - vlat)**2 |
---|
| 1316 | idy,idx = np.unravel_index( np.argmin(array), lon2d.shape ) |
---|
| 1317 | if vlon is not None: |
---|
[475] | 1318 | if (np.abs(lon2d[idy,idx]-vlon)) > 5: errormess("longitude not found ",printvar=lon2d) |
---|
[399] | 1319 | if vlat is not None: |
---|
[475] | 1320 | if (np.abs(lat2d[idy,idx]-vlat)) > 5: errormess("latitude not found ",printvar=lat2d) |
---|
| 1321 | if file is not None: |
---|
| 1322 | print idx,idy,lon2d[idy,idx],vlon |
---|
| 1323 | print idx,idy,lat2d[idy,idx],vlat |
---|
| 1324 | var = file.variables["HGT"][:,:,:] |
---|
[489] | 1325 | mpl.contourf(var[0,:,:],30,cmap = mpl.get_cmap(name="Greys_r") ) ; mpl.axis('off') ; mpl.plot(idx,idy,'mx',mew=4.0,ms=20.0) |
---|
[475] | 1326 | mpl.show() |
---|
| 1327 | return idy,idx |
---|
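
## Illustrative sketch (hypothetical function name and grid): bidimfind returns the
## (j,i) indices of the grid point closest to the requested lon/lat on 2D coordinate arrays.
def example_bidimfind ():
    lon2d, lat2d = np.meshgrid( np.arange(-180.,180.,5.), np.arange(-90.,91.,5.) )
    idy, idx = bidimfind(lon2d, lat2d, 137.4, -4.6)   # nearest grid point to 137.4E, 4.6S
    return idy, idx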
[399] | 1328 | |
---|
[345] | 1329 | ## Author: TN |
---|
[399] | 1330 | def getsindex(saxis,index,axis): |
---|
[345] | 1331 | # input : all the desired slices and the index of the slice to treat |
---|
| 1332 | # output : all indexes to be taken into account for reducing field |
---|
[349] | 1333 | if ( np.array(axis).ndim == 2): |
---|
| 1334 | axis = axis[:,0] |
---|
[345] | 1335 | if saxis is None: |
---|
| 1336 | zeindex = None |
---|
| 1337 | else: |
---|
| 1338 | aaa = int(np.argmin(abs(saxis[index,0] - axis))) |
---|
| 1339 | bbb = int(np.argmin(abs(saxis[index,1] - axis))) |
---|
| 1340 | [imin,imax] = np.sort(np.array([aaa,bbb])) |
---|
| 1341 | zeindex = np.array(range(imax-imin+1))+imin |
---|
| 1342 | # because -180 and 180 are the same point in longitude, |
---|
| 1343 | # we get rid of one for averaging purposes. |
---|
| 1344 | if axis[imin] == -180 and axis[imax] == 180: |
---|
| 1345 | zeindex = zeindex[0:len(zeindex)-1] |
---|
[392] | 1346 | print "INFO: whole longitude averaging asked, so last point is not taken into account." |
---|
[345] | 1347 | return zeindex |
---|
| 1348 | |
---|
| 1349 | ## Author: TN |
---|
[763] | 1350 | def define_axis(lon,lat,vert,time,indexlon,indexlat,indexvert,indextime,what_I_plot,dim0,vertmode,redope): |
---|
[345] | 1351 | # Purpose of define_axis is to find x and y axis scales in a smart way |
---|
| 1352 | # x axis priority: 1/time 2/lon 3/lat 4/vertical |
---|
| 1353 | # To be improved !!!... |
---|
| 1354 | x = None |
---|
| 1355 | y = None |
---|
| 1356 | count = 0 |
---|
[876] | 1357 | what_I_plot = np.array(what_I_plot) |
---|
[345] | 1358 | shape = what_I_plot.shape |
---|
[477] | 1359 | if indextime is None and len(time) > 1: |
---|
[392] | 1360 | print "AXIS is time" |
---|
[345] | 1361 | x = time |
---|
| 1362 | count = count+1 |
---|
[763] | 1363 | if indexlon is None and len(lon) > 1 and redope not in ['edge_x1','edge_x2']: |
---|
[392] | 1364 | print "AXIS is lon" |
---|
[345] | 1365 | if count == 0: x = lon |
---|
| 1366 | else: y = lon |
---|
| 1367 | count = count+1 |
---|
[763] | 1368 | if indexlat is None and len(lat) > 1 and redope not in ['edge_y1','edge_y2']: |
---|
[392] | 1369 | print "AXIS is lat" |
---|
[345] | 1370 | if count == 0: x = lat |
---|
| 1371 | else: y = lat |
---|
| 1372 | count = count+1 |
---|
[817] | 1373 | if indexvert is None and len(vert) > 1 and ((dim0 == 4) or (y is None)): |
---|
[392] | 1374 | print "AXIS is vert" |
---|
[345] | 1375 | if vertmode == 0: # vertical axis is as is (GCM grid) |
---|
| 1376 | if count == 0: x=range(len(vert)) |
---|
| 1377 | else: y=range(len(vert)) |
---|
| 1378 | count = count+1 |
---|
| 1379 | else: # vertical axis is in kms |
---|
| 1380 | if count == 0: x = vert |
---|
| 1381 | else: y = vert |
---|
| 1382 | count = count+1 |
---|
[876] | 1383 | x = np.array(x) |
---|
| 1384 | y = np.array(y) |
---|
[468] | 1385 | print "CHECK SHAPE: what_I_plot, x, y", what_I_plot.shape, x.shape, y.shape |
---|
[345] | 1386 | if len(shape) == 1: |
---|
[562] | 1387 | if shape[0] != len(x): print "WARNING: shape[0] != len(x). Correcting." ; what_I_plot = what_I_plot[0:len(x)] |
---|
[579] | 1388 | if len(y.shape) > 0: y = () |
---|
[345] | 1389 | elif len(shape) == 2: |
---|
[562] | 1390 | if shape[1] == len(y) and shape[0] == len(x) and shape[0] != shape[1]: |
---|
[876] | 1391 | print "INFO: swapaxes: ",what_I_plot.shape,shape ; what_I_plot = np.swapaxes(what_I_plot,0,1) |
---|
[562] | 1392 | else: |
---|
| 1393 | if shape[0] != len(y): print "WARNING: shape[0] != len(y). Correcting." ; what_I_plot = what_I_plot[0:len(y),:] |
---|
| 1394 | elif shape[1] != len(x): print "WARNING: shape[1] != len(x). Correcting." ; what_I_plot = what_I_plot[:,0:len(x)] |
---|
| 1395 | elif len(shape) == 3: |
---|
| 1396 | if vertmode < 0: print "not supported. must check array dimensions at some point. not difficult to implement though." |
---|
[345] | 1397 | return what_I_plot,x,y |
---|
[349] | 1398 | |
---|
[763] | 1399 | # Author: TN + AS + AC |
---|
| 1400 | def determineplot(slon, slat, svert, stime, redope): |
---|
[349] | 1401 | nlon = 1 # number of longitudinal slices -- 1 is None |
---|
| 1402 | nlat = 1 |
---|
| 1403 | nvert = 1 |
---|
| 1404 | ntime = 1 |
---|
| 1405 | nslices = 1 |
---|
| 1406 | if slon is not None: |
---|
[770] | 1407 | length=len(slon[:,0]) |
---|
[763] | 1408 | nslices = nslices*length |
---|
[349] | 1409 | nlon = len(slon) |
---|
| 1410 | if slat is not None: |
---|
[770] | 1411 | length=len(slat[:,0]) |
---|
[763] | 1412 | nslices = nslices*length |
---|
[349] | 1413 | nlat = len(slat) |
---|
| 1414 | if svert is not None: |
---|
[771] | 1415 | length=len(svert[:,0]) |
---|
[763] | 1416 | nslices = nslices*length |
---|
[349] | 1417 | nvert = len(svert) |
---|
| 1418 | if stime is not None: |
---|
[770] | 1419 | length=len(stime[:,0]) |
---|
[763] | 1420 | nslices = nslices*length |
---|
[349] | 1421 | ntime = len(stime) |
---|
| 1422 | #else: |
---|
| 1423 | # nslices = 2 |
---|
| 1424 | mapmode = 0 |
---|
[763] | 1425 | if slon is None and slat is None and redope not in ['edge_x1','edge_x2','edge_y1','edge_y2']: |
---|
[349] | 1426 | mapmode = 1 # in this case we plot a map, with the given projection |
---|
| 1427 | return nlon, nlat, nvert, ntime, mapmode, nslices |
---|
[440] | 1428 | |
---|
[638] | 1429 | ## Author : AS |
---|
| 1430 | def maplatlon( lon,lat,field,\ |
---|
| 1431 | proj="cyl",colorb="jet",ndiv=10,zeback="molabw",trans=0.6,title="",\ |
---|
| 1432 | vecx=None,vecy=None,stride=2 ): |
---|
| 1433 | ### an easy way to map a field over a lat/lon grid |
---|
| 1434 | import matplotlib.pyplot as mpl |
---|
| 1435 | from matplotlib.cm import get_cmap |
---|
| 1436 | ## get lon and lat in 2D version. get lat/lon intervals |
---|
| 1437 | numdim = len(np.array(lon).shape) |
---|
| 1438 | if numdim == 2: [lon2d,lat2d] = [lon,lat] |
---|
| 1439 | elif numdim == 1: [lon2d,lat2d] = np.meshgrid(lon,lat) |
---|
| 1440 | else: errormess("lon and lat arrays must be 1D or 2D") |
---|
[796] | 1441 | #[wlon,wlat] = latinterv() |
---|
| 1442 | [wlon,wlat] = simplinterv(lon2d,lat2d) |
---|
[638] | 1443 | ## define projection and background. define x and y given the projection |
---|
| 1444 | m = define_proj(proj,wlon,wlat,back=zeback,blat=None,blon=None) |
---|
| 1445 | x, y = m(lon2d, lat2d) |
---|
| 1446 | ## define field. bound field. |
---|
| 1447 | what_I_plot = np.transpose(field) |
---|
| 1448 | zevmin, zevmax = calculate_bounds(what_I_plot) ## vmin=min(what_I_plot_frame), vmax=max(what_I_plot_frame)) |
---|
| 1449 | what_I_plot = bounds(what_I_plot,zevmin,zevmax) |
---|
| 1450 | ## define contour field levels. define color palette |
---|
| 1451 | ticks = ndiv + 1 |
---|
| 1452 | zelevels = np.linspace(zevmin,zevmax,ticks) |
---|
| 1453 | palette = get_cmap(name=colorb) |
---|
| 1454 | ## contour field |
---|
| 1455 | m.contourf( x, y, what_I_plot, zelevels, cmap = palette, alpha = trans ) |
---|
| 1456 | ## draw colorbar |
---|
| 1457 | if proj in ['moll','cyl']: zeorientation="horizontal" ; zepad = 0.07 |
---|
| 1458 | else: zeorientation="vertical" ; zepad = 0.03 |
---|
| 1459 | #daformat = fmtvar(fvar.upper()) |
---|
| 1460 | daformat = "%.0f" |
---|
| 1461 | zecb = mpl.colorbar( fraction=0.05,pad=zepad,format=daformat,orientation=zeorientation,\ |
---|
| 1462 | ticks=np.linspace(zevmin,zevmax,num=min([ticks/2+1,21])),extend='neither',spacing='proportional' ) |
---|
| 1463 | ## give a title |
---|
| 1464 | if zeorientation == "horizontal": zecb.ax.set_xlabel(title) |
---|
| 1465 | else: ptitle(title) |
---|
| 1466 | ## draw vector |
---|
| 1467 | if vecx is not None and vecy is not None: |
---|
| 1468 | [vecx_frame,vecy_frame] = m.rotate_vector( np.transpose(vecx), np.transpose(vecy), lon2d, lat2d ) ## for metwinds |
---|
| 1469 | vectorfield(vecx_frame, vecy_frame, x, y, stride=stride, csmooth=2,\ |
---|
| 1470 | scale=30., factor=500., color=definecolorvec(colorb), key=True) |
---|
| 1471 | ## scale sets the reference vector. factor scales all the lengths (including the reference). INCREASE it to shorten the vectors. |
---|
| 1472 | return |
---|
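
## Illustrative sketch (hypothetical function name, synthetic data): maplatlon transposes
## the field internally, so the field is dimensioned (lon,lat) here. zeback=None skips the
## background image; the underlying define_proj call still requires basemap.
def example_maplatlon ():
    import matplotlib.pyplot as mpl
    lon = np.linspace(-180.,180.,73) ; lat = np.linspace(-90.,90.,37)
    field = np.outer( np.cos(np.pi*lon/180.), np.sin(np.pi*lat/90.) )   # shape (nlon,nlat)
    maplatlon( lon, lat, field, proj="cyl", colorb="jet", ndiv=10, zeback=None, title="example" )
    mpl.show()
    return
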
[754] | 1473 | ## Author : AC |
---|
| 1474 | ## Handles calls to specific computations (e.g. wind norm, enrichment factor...) |
---|
[763] | 1475 | def select_getfield(zvarname=None,znc=None,ztypefile=None,mode=None,ztsat=None,ylon=None,ylat=None,yalt=None,ytime=None,analysis=None): |
---|
[754] | 1476 | from mymath import get_tsat |
---|
| 1477 | |
---|
| 1478 | ## Specific variables are described here: |
---|
| 1479 | # for the mesoscale: |
---|
[763] | 1480 | specificname_meso = ['UV','uv','uvmet','slopexy','SLOPEXY','deltat','DELTAT','hodograph','tk','hodograph_2'] |
---|
[754] | 1481 | # for the gcm: |
---|
| 1482 | specificname_gcm = ['enfact'] |
---|
| 1483 | |
---|
[876] | 1484 | zncvar = znc.variables |
---|
| 1485 | |
---|
[754] | 1486 | ## Check for variable in file: |
---|
| 1487 | if mode == 'check': |
---|
| 1488 | varname = zvarname |
---|
[876] | 1489 | varinfile=zncvar.keys() |
---|
| 1490 | logical_novarname = zvarname not in zncvar |
---|
[754] | 1491 | logical_nospecificname_meso = not ((ztypefile in ['meso']) and (zvarname in specificname_meso)) |
---|
| 1492 | logical_nospecificname_gcm = not ((ztypefile in ['gcm']) and (zvarname in specificname_gcm)) |
---|
| 1493 | if ( logical_novarname and logical_nospecificname_meso and logical_nospecificname_gcm ): |
---|
| 1494 | if len(varinfile) == 1: varname = varinfile[0] |
---|
| 1495 | else: varname = False |
---|
| 1496 | ## Return the variable name: |
---|
| 1497 | return varname |
---|
| 1498 | |
---|
| 1499 | ## Get the corresponding variable: |
---|
| 1500 | if mode == 'getvar': |
---|
[763] | 1501 | plot_x = None ; plot_y = None ; |
---|
[754] | 1502 | ### ----------- 1. saturation temperature |
---|
| 1503 | if zvarname in ["temp","t","T_nadir_nit","T_nadir_day","temp_day","temp_night"] and ztsat: |
---|
| 1504 | tt=getfield(znc,zvarname) ; print "computing Tsat-T, I ASSUME Z-AXIS IS PRESSURE" |
---|
| 1505 | if type(tt).__name__=='MaskedArray': tt.set_fill_value([np.NaN]) ; tinput=tt.filled() |
---|
| 1506 | else: tinput=tt |
---|
| 1507 | all_var=get_tsat(yalt,tinput,zlon=ylon,zlat=ylat,zalt=yalt,ztime=ytime) |
---|
| 1508 | ### ----------- 2. wind amplitude |
---|
[876] | 1509 | elif ((zvarname in ['UV','uv','uvmet']) and (ztypefile in ['meso']) and (zvarname not in zncvar)): |
---|
[763] | 1510 | all_var=windamplitude(znc,'amplitude') |
---|
[876] | 1511 | elif ((zvarname in ['hodograph','hodograph_2']) and (ztypefile in ['meso']) and (zvarname not in zncvar)): |
---|
[763] | 1512 | plot_x, plot_y = windamplitude(znc,zvarname) |
---|
| 1513 | if plot_x is not None: all_var=plot_x # dummy |
---|
| 1514 | else: all_var=plot_y ; plot_x = None ; plot_y = None # Hodograph type 2 is not 'xy' mode |
---|
[876] | 1515 | elif ((zvarname in ['slopexy','SLOPEXY']) and (ztypefile in ['meso']) and (zvarname not in zncvar)): |
---|
[754] | 1516 | all_var=slopeamplitude(znc) |
---|
| 1517 | ### ------------ 3. Near surface instability |
---|
[876] | 1518 | elif ((zvarname in ['DELTAT','deltat']) and (ztypefile in ['meso']) and (zvarname not in zncvar)): |
---|
[754] | 1519 | all_var=deltat0t1(znc) |
---|
| 1520 | ### ------------ 4. Enrichment factor |
---|
| 1521 | elif ((ztypefile in ['gcm']) and (zvarname in ['enfact'])): |
---|
| 1522 | all_var=enrichment_factor(znc,ylon,ylat,ytime) |
---|
[763] | 1523 | ### ------------ 5. teta -> temp |
---|
[876] | 1524 | elif ((ztypefile in ['meso']) and (zvarname in ['tk']) and ('tk' not in zncvar.keys())): |
---|
[763] | 1525 | all_var=teta_to_tk(znc) |
---|
[754] | 1526 | else: |
---|
| 1527 | ### ----------- 999. Normal case |
---|
| 1528 | all_var = getfield(znc,zvarname) |
---|
[763] | 1529 | if analysis is not None: |
---|
| 1530 | if analysis in ['histo','density','histodensity']: plot_y=all_var ; plot_x = plot_y |
---|
| 1531 | elif analysis == 'fft': plot_y, plot_x = spectrum(all_var,ytime,yalt,ylat,ylon) ; all_var = plot_y |
---|
| 1532 | return all_var, plot_x, plot_y |
---|
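
## Illustrative sketch (hypothetical file and function names): select_getfield is typically
## called twice, first in 'check' mode to validate the requested variable name, then in
## 'getvar' mode to retrieve the corresponding field.
def example_select_getfield (zfile="wrfout_example.nc"):
    znc = netCDF4.Dataset(zfile)
    zvar = select_getfield(zvarname="tk", znc=znc, ztypefile="meso", mode="check")
    if not zvar: errormess("no usable variable found in "+zfile)
    field, plot_x, plot_y = select_getfield(zvarname=zvar, znc=znc, ztypefile="meso", mode="getvar")
    return field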
| 1533 | |
---|
| 1534 | # Author : A.C |
---|
| 1535 | # The FFT is deliberately computed before reducefield, because we don't want to compute |
---|
| 1536 | # FFTs on averaged fields (which would kill all waves). Instead, we take the FFT everywhere |
---|
| 1537 | # (which is not efficient, but still acceptable) and only then average (if the user asks for it). |
---|
| 1538 | def spectrum(var,time,vert,lat,lon): |
---|
| 1539 | fft=np.fft.fft(var,axis=1) |
---|
| 1540 | N=len(vert) |
---|
| 1541 | step=(vert[1]-vert[0])*1000. |
---|
| 1542 | print "step is: ",step |
---|
| 1543 | fftfreq=np.fft.fftfreq(N,d=step) |
---|
| 1544 | fftfreq=np.fft.fftshift(fftfreq) # spatial FFT => this is the wavenumber |
---|
| 1545 | fft=np.fft.fftshift(fft) |
---|
| 1546 | fftfreq = 1./fftfreq # => wavelength (div by 0 expected, don't panic) |
---|
| 1547 | fft=np.abs(fft) # => amplitude spectrum |
---|
| 1548 | # fft=np.abs(fft)**2 # => power spectrum |
---|
| 1549 | return fft,fftfreq |
---|
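
## Illustrative sketch (hypothetical function name, synthetic data): a wave with a 10 km
## vertical wavelength should appear as a peak of the amplitude spectrum near +/- 10000 m.
## vert is in km, consistent with the step*1000. conversion above; the vertical axis of
## var must be axis 1, as in (time, altitude, lat, lon) fields.
def example_spectrum ():
    vert = np.arange(0.,50.,0.5)                                     # altitudes in km
    wave = np.sin( 2.*np.pi*vert/10. )                               # signal with 10 km wavelength
    var = wave[np.newaxis,:,np.newaxis,np.newaxis] * np.ones((3,len(vert),4,5))
    fft, wavelength = spectrum(var, None, vert, None, None)          # time/lat/lon are not used by spectrum
    return fft, wavelength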
| 1550 | |
---|
| 1551 | # Author : A.C. |
---|
| 1552 | # Computes temperature from potential temperature for mesoscale files, without the need to use API, i.e. using natural vertical grid |
---|
| 1553 | def teta_to_tk(nc): |
---|
| 1554 | varinfile = nc.variables.keys() |
---|
| 1555 | p0=610. |
---|
| 1556 | t0=220. |
---|
| 1557 | r_cp=1./3.89419 |
---|
| 1558 | if "T" in varinfile: zteta=getfield(nc,'T') |
---|
| 1559 | else: errormess("you need T in your file.") |
---|
| 1560 | if "PTOT" in varinfile: zptot=getfield(nc,'PTOT') |
---|
| 1561 | else: errormess("you need PTOT in your file.") |
---|
| 1562 | zt=(zteta+t0)*(zptot/p0)**(r_cp) |
---|
| 1563 | return zt |
---|
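
## Worked formula behind teta_to_tk, spelled out for clarity (values taken from the code
## above, where T appears to be the perturbation potential temperature):
##   tk = (T + t0) * (PTOT/p0)**(R/cp)   with p0 = 610 Pa, t0 = 220 K, R/cp = 1/3.89419 ~ 0.257
## so at PTOT = p0 the pressure factor is 1 and tk reduces to T + 220 K.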
[792] | 1564 | |
---|
| 1565 | # Author : A.C. |
---|
| 1566 | # Find the lon and lat index of the dust devil with the largest pressure gradient |
---|
| 1567 | # Steps : |
---|
| 1568 | # 1/ convert the chosen PSFC frame to an image of the PSFC anomaly with respect to the mean |
---|
| 1569 | # 2/ apply the Sobel operator |
---|
| 1570 | # (The Sobel operator performs a 2-D spatial gradient measurement on an image and so emphasizes regions of high spatial frequency that correspond to edges.) |
---|
| 1571 | # 3/ find the maximum of the resulting field |
---|
| 1572 | # 4/ find the points in a 5 pixel radius around the maximum for which the value of the Sobel transform is greater than half the maximum |
---|
| 1573 | # 5/ define a slab of points encompassing the above selected points, including the potential points 'inside' them (if the above points are a hollow circle for example) |
---|
| 1574 | # 6/ in this slab, find the point at which the surface pressure is minimum |
---|
| 1575 | def find_devil(nc,indextime): |
---|
| 1576 | from scipy import ndimage |
---|
| 1577 | from mymath import array2image,image2array |
---|
| 1578 | |
---|
| 1579 | varinfile = nc.variables.keys() |
---|
| 1580 | if "PSFC" not in varinfile: errormess("You need PSFC in your file to find dust devils") |
---|
| 1581 | else: psfc_full=getfield(nc,'PSFC') |
---|
| 1582 | psfc,error=reducefield( psfc_full, d4=indextime) |
---|
| 1583 | psfcim=array2image(1000.*(psfc-psfc.mean())) |
---|
| 1584 | sx = ndimage.sobel(psfcim, axis=0, mode='constant') ; sy = ndimage.sobel(psfcim, axis=1, mode='constant') |
---|
| 1585 | sob = np.hypot(sx, sy) |
---|
| 1586 | zemax=np.max(sob) |
---|
| 1587 | goodvalues = sob[sob >= zemax/2] |
---|
| 1588 | ix = np.in1d(sob.ravel(), goodvalues).reshape(sob.shape) |
---|
| 1589 | idxs,idys=np.where(ix) |
---|
| 1590 | maxvalue = sob[sob == zemax] |
---|
| 1591 | ixmax = np.in1d(sob.ravel(), maxvalue[0]).reshape(sob.shape) |
---|
| 1592 | idxmax,idymax=np.where(ixmax) |
---|
| 1593 | valok=[] |
---|
| 1594 | for i in np.arange(len(idxs)): |
---|
| 1595 | a=np.sqrt((idxmax-idxs[i])**2 + (idymax-idys[i])**2) |
---|
| 1596 | if 0 < a <= 5.*np.sqrt(2.): valok.append(goodvalues[i]) |
---|
| 1597 | ix = np.in1d(sob.ravel(), valok).reshape(sob.shape) |
---|
| 1598 | idxs,idys=np.where(ix) |
---|
| 1599 | hyperslab=psfc[np.min(idxs):np.max(idxs),np.min(idys):np.max(idys)] |
---|
| 1600 | idxsub,idysub=np.where(hyperslab==hyperslab.min()) |
---|
| 1601 | idx=idxsub[0]+np.min(idxs) ; idy=idysub[0]+np.min(idys) |
---|
| 1602 | return np.int(idx),np.int(idy) |
---|