## Author: AS
def errormess(text,printvar=None):
    print text
    if printvar is not None: print printvar
    exit()
    return

## Author: AS
def adjust_length (tab, zelen):
    from numpy import ones
    if tab is None:
        outtab = ones(zelen) * -999999
    else:
        if zelen != len(tab):
            print "not enough or too many values... using the first value for all variables"
            outtab = ones(zelen) * tab[0]
        else:
            outtab = tab
    return outtab

## Author: AS
def getname(var=False,var2=False,winds=False,anomaly=False):
    if var and winds: basename = var + '_UV'
    elif var:         basename = var
    elif winds:       basename = 'UV'
    else:             errormess("please set at least winds or var")
    if anomaly:       basename = 'd' + basename
    if var2:          basename = basename + '_' + var2
    return basename

## Author: AS + AC
def localtime(time,lon,namefile): # lon is the mean longitude of the plot, not of the domain. central lon of domain is taken from cen_lon
    import numpy as np
    from netCDF4 import Dataset
    ## THIS IS FOR MESOSCALE
    nc = Dataset(namefile)
    ## get start date and intervals
    dt_hour=1. ; start=0.
    if hasattr(nc,'TITLE'):
        title=getattr(nc, 'TITLE')
        if hasattr(nc,'DT') and hasattr(nc,'START_DATE') and 'MRAMS' in title:
            ## we must adapt what is done in getlschar to MRAMS (outputs from ic.py)
            dt_hour=getattr(nc, 'DT')/60.
            start_date=getattr(nc, 'START_DATE')
            start_hour=np.float(start_date[11:13])
            start_minute=np.float(start_date[14:16])/60.
            start=start_hour+start_minute # start is the local time of simu at longitude 0
            #LMD MMM is 1 output/hour (and not 1 output/timestep)
            #MRAMS is 1 output/timestep, unless stride is added in ic.py
        elif 'WRF' in title:
            [dummy,start,dt_hour] = getlschar ( namefile ) # get start hour and interval hour
    ## get longitude
    if lon is not None:
        if lon[0,1]!=lon[0,0]: mean_lon_plot = 0.5*(lon[0,1]-lon[0,0])
        else: mean_lon_plot=lon[0,0]
    elif hasattr(nc, 'CEN_LON'): mean_lon_plot=getattr(nc, 'CEN_LON')
    else: mean_lon_plot=0.
    ## calculate local time
    ltst = start + time*dt_hour + mean_lon_plot / 15.
    ltst = int (ltst * 10) / 10.
    ltst = ltst % 24
    return ltst

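## Usage sketch for localtime() -- the filename below is only an assumption:
##   lt = localtime(10, None, "wrfout_d01_example")   # local true solar time at output index 10,
##                                                    # longitude taken from the CEN_LON attribute
## e.g. with start=6., dt_hour=1. and CEN_LON=45., index 10 gives 6. + 10*1. + 45./15. = 19.0
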
## Author: AC
def check_localtime(time):
    a=-1
    for i in range(len(time)-1):
        if (time[i] > time[i+1]): a=i
    if a >= 0 and a < (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range(a+1):
            time[i]=time[i]-24.
    if a >= 0 and a >= (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range((len(time)-1) - a):
            time[a+1+i]=time[a+1+i]+24.
    return time

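## What the recasting in check_localtime() does (values in hours):
##   [22, 23, 0, 1, 2]   -->  [-2, -1, 0, 1, 2]     (wrap detected in the first half of the axis)
##   [20, 21, 22, 23, 0] -->  [20, 21, 22, 23, 24]  (wrap detected in the second half of the axis)
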
## Author: AS, AC, JL
def whatkindfile (nc):
    typefile = 'gcm' # default
    if 'controle' in nc.variables:       typefile = 'gcm'
    elif 'phisinit' in nc.variables:     typefile = 'gcm'
    elif 'phis' in nc.variables:         typefile = 'gcm'
    elif 'time_counter' in nc.variables: typefile = 'earthgcm'
    elif hasattr(nc,'START_DATE'):       typefile = 'meso'
    elif 'HGT_M' in nc.variables:        typefile = 'geo'
    elif hasattr(nc,'institution'):
        if "European Centre" in getattr(nc,'institution'): typefile = 'ecmwf'
    return typefile

## Author: AS
def getfield (nc,var):
    ## this allows to get the field much faster than simply referring to nc.variables[var]
    import numpy as np
    dimension = len(nc.variables[var].dimensions)
    #print " Opening variable with", dimension, "dimensions ..."
    if dimension == 2:   field = nc.variables[var][:,:]
    elif dimension == 3: field = nc.variables[var][:,:,:]
    elif dimension == 4: field = nc.variables[var][:,:,:,:]
    elif dimension == 1: field = nc.variables[var][:]
    # if there are NaNs in the netCDF file, they should be loaded as a masked array which will be
    # recast as a regular array later in reducefield
    if (np.isnan(np.sum(field)) and (type(field).__name__ not in 'MaskedArray')):
        print "Warning: netcdf has NaN values but is not loaded as a Masked Array."
        print "recasting array type"
        out=np.ma.masked_invalid(field)
        out.set_fill_value([np.NaN])
    else:
        # missing values from zrecast or hrecast are -1e33
        masked=np.ma.masked_where(field < -1e30,field)
        masked2=np.ma.masked_where(field > 1e35,field)
        masked.set_fill_value([np.NaN]) ; masked2.set_fill_value([np.NaN])
        mask = np.ma.getmask(masked) ; mask2 = np.ma.getmask(masked2)
        if (True in np.array(mask)):
            out=masked
            print "Masked array... Missing value is NaN"
        elif (True in np.array(mask2)):
            out=masked2
            print "Masked array... Missing value is NaN"
#        else:
#            # missing values from api are 1e36
#            masked=np.ma.masked_where(field > 1e35,field)
#            masked.set_fill_value([np.NaN])
#            mask = np.ma.getmask(masked)
#            if (True in np.array(mask)): out=masked
#            else: out=field
        else:
            # missing values from MRAMS files are 0.100E+32
            masked=np.ma.masked_where(field > 1e30,field)
            masked.set_fill_value([np.NaN])
            mask = np.ma.getmask(masked)
            if (True in np.array(mask)): out=masked
            else: out=field
#        else: out=field
    return out

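## Usage sketch for getfield(), with a hypothetical file and variable name:
##   from netCDF4 import Dataset
##   nc = Dataset("diagfi.nc")
##   temp = getfield(nc,'temp')   # whole array loaded at once, missing values masked (NaN fill)
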
## Author: AC
# Compute the norm of the winds or return a hodograph
# The corresponding variable to call is UV or uvmet (to use api)
def windamplitude (nc,mode):
    import numpy as np
    varinfile = nc.variables.keys()
    if "U" in varinfile: zu=getfield(nc,'U')
    elif "Um" in varinfile: zu=getfield(nc,'Um')
    else: errormess("you need U or Um in your file.")
    if "V" in varinfile: zv=getfield(nc,'V')
    elif "Vm" in varinfile: zv=getfield(nc,'Vm')
    else: errormess("you need V or Vm in your file.")
    znt,znz,zny,znx = np.array(zu).shape
    if hasattr(nc,'WEST-EAST_PATCH_END_UNSTAG'): znx=getattr(nc, 'WEST-EAST_PATCH_END_UNSTAG')
    zuint = np.zeros([znt,znz,zny,znx])
    zvint = np.zeros([znt,znz,zny,znx])
    if "U" in varinfile:
        if hasattr(nc,'SOUTH-NORTH_PATCH_END_STAG'): zny_stag=getattr(nc, 'SOUTH-NORTH_PATCH_END_STAG')
        if hasattr(nc,'WEST-EAST_PATCH_END_STAG'): znx_stag=getattr(nc, 'WEST-EAST_PATCH_END_STAG')
        if zny_stag == zny: zvint=zv
        else:
            for yy in np.arange(zny): zvint[:,:,yy,:] = (zv[:,:,yy,:] + zv[:,:,yy+1,:])/2.
        if znx_stag == znx: zuint=zu
        else:
            for xx in np.arange(znx): zuint[:,:,:,xx] = (zu[:,:,:,xx] + zu[:,:,:,xx+1])/2.
    else:
        zuint=zu
        zvint=zv
    if mode=='amplitude': return np.sqrt(zuint**2 + zvint**2)
    if mode=='hodograph': return zuint,zvint
    if mode=='hodograph_2': return None, 360.*np.arctan(zvint/zuint)/(2.*np.pi)

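## Usage sketch for windamplitude() on a mesoscale file (filename is an assumption):
##   nc = Dataset("wrfout_d01_example")
##   speed = windamplitude(nc,'amplitude')   # 4D wind speed, staggered U/V averaged to mass points
##   u,v   = windamplitude(nc,'hodograph')   # the two unstaggered components
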
## Author: AC
# Compute the enrichment factor of non condensible gases
# The corresponding variable to call is enfact
# enrichment factor is computed as in Yuan Lian et al. 2012
# i.e. you need to have the VL2 site at Ls 135 in your data
# this only requires co2col so that you can concat.nc at low cost
def enrichment_factor(nc,lon,lat,time):
    import numpy as np
    from myplot import reducefield
    varinfile = nc.variables.keys()
    if "co2col" in varinfile: co2col=getfield(nc,'co2col')
    else: print "error, you need the co2col variable in your file"
    if "ps" in varinfile: ps=getfield(nc,'ps')
    else: print "error, you need the ps variable in your file"
    dimension = len(nc.variables['co2col'].dimensions)
    if dimension == 2:
        zny,znx = np.array(co2col).shape
        znt=1
    elif dimension == 3: znt,zny,znx = np.array(co2col).shape
    mmrarcol = np.zeros([znt,zny,znx])
    enfact = np.zeros([znt,zny,znx])
    grav=3.72
    mmrarcol[:,:,:] = 1. - grav*co2col[:,:,:]/ps[:,:,:]
    # Computation with reference argon mmr at VL2 Ls 135 (as in Yuan Lian et al. 2012)
    lonvl2=np.zeros([1,2])
    latvl2=np.zeros([1,2])
    timevl2=np.zeros([1,2])
    lonvl2[0,0]=-180
    lonvl2[0,1]=180
    latvl2[:,:]=48.16
    timevl2[:,:]=135.
    indexlon = getsindex(lonvl2,0,lon)
    indexlat = getsindex(latvl2,0,lat)
    indextime = getsindex(timevl2,0,time)
    mmrvl2, error = reducefield( mmrarcol, d4=indextime, d1=indexlon, d2=indexlat)
    print "VL2 Ls 135 mmr arcol:", mmrvl2
    enfact[:,:,:] = mmrarcol[:,:,:]/mmrvl2
    return enfact

## Author: AC
# Compute the norm of the slope angles
# The corresponding variable to call is SLOPEXY
def slopeamplitude (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "slopex" in varinfile: zu=getfield(nc,'slopex')
    elif "SLOPEX" in varinfile: zu=getfield(nc,'SLOPEX')
    else: errormess("you need slopex or SLOPEX in your file.")
    if "slopey" in varinfile: zv=getfield(nc,'slopey')
    elif "SLOPEY" in varinfile: zv=getfield(nc,'SLOPEY')
    else: errormess("you need slopey or SLOPEY in your file.")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zvint = np.zeros([znt,zny,znx])
    zuint=zu
    zvint=zv
    return np.sqrt(zuint**2 + zvint**2)

## Author: AC
# Compute the temperature difference between surface and first level.
# API is automatically called to get TSURF and TK.
# The corresponding variable to call is DELTAT
def deltat0t1 (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "tsurf" in varinfile: zu=getfield(nc,'tsurf')
    elif "TSURF" in varinfile: zu=getfield(nc,'TSURF')
    else: errormess("You need tsurf or TSURF in your file")
    if "tk" in varinfile: zv=getfield(nc,'tk')
    elif "TK" in varinfile: zv=getfield(nc,'TK')
    else: errormess("You need tk or TK in your file. (might need to use API. try to add -i 4 -l XXX)")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zuint=zu - zv[:,0,:,:]
    return zuint

## Author: AS + TN + AC
def reducefield (input,d4=None,d3=None,d2=None,d1=None,yint=False,alt=None,anomaly=False,redope=None,mesharea=None,unidim=999):
    ### we do it the reverse way to be compliant with netcdf "t z y x" or "t y x" or "y x"
    ### it would actually be better to name d4 d3 d2 d1 as t z y x
    ### ... note, anomaly is only computed over d1 and d2 for the moment
    import numpy as np
    from mymath import max,mean,min,sum,getmask
    csmooth = 12 ## a fair amount of grid points (too high results in high computation time)
    if redope is not None:
#       if   redope == "mint": input = min(input,axis=0) ; d1 = None
#       elif redope == "maxt": input = max(input,axis=0) ; d1 = None
        if   redope == "edge_y1": input = input[:,:,0,:]  ; d2 = None
        elif redope == "edge_y2": input = input[:,:,-1,:] ; d2 = None
        elif redope == "edge_x1": input = input[:,:,:,0]  ; d1 = None
        elif redope == "edge_x2": input = input[:,:,:,-1] ; d1 = None
#       else: errormess("not supported. but try lines in reducefield beforehand.")
        #elif redope == "minz": input = min(input,axis=1) ; d2 = None
        #elif redope == "maxz": input = max(input,axis=1) ; d2 = None
        #elif redope == "miny": input = min(input,axis=2) ; d3 = None
        #elif redope == "maxy": input = max(input,axis=2) ; d3 = None
        #elif redope == "minx": input = min(input,axis=3) ; d4 = None
        #elif redope == "maxx": input = max(input,axis=3) ; d4 = None
    dimension = np.array(input).ndim
    shape = np.array(np.array(input).shape)
    #print 'd1,d2,d3,d4: ',d1,d2,d3,d4
    if anomaly: print 'ANOMALY ANOMALY'
    output = input
    error = False
    #### this is needed to cope with the case where d4,d3,d2,d1 are single integers and not arrays
    if d4 is not None and not isinstance(d4, np.ndarray): d4=[d4]
    if d3 is not None and not isinstance(d3, np.ndarray): d3=[d3]
    if d2 is not None and not isinstance(d2, np.ndarray): d2=[d2]
    if d1 is not None and not isinstance(d1, np.ndarray): d1=[d1]
    ### now the main part
    if dimension == 2:
        #### this is needed for 1d-type files (where dim=2 but axes are time-vert and not lat-lon)
        if unidim==1: d2=d4 ; d1=d3 ; d4 = None ; d3 = None
        if mesharea is None: mesharea=np.ones(shape)
        if max(d2) >= shape[0]: error = True
        elif max(d1) >= shape[1]: error = True
        elif d1 is not None and d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d1 is not None: output = mean(input[:,d1],axis=1)
        elif d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
    elif dimension == 3:
        if mesharea is None: mesharea=np.ones(shape[[1,2]])
        if max(d4) >= shape[0]: error = True
        elif max(d2) >= shape[1]: error = True
        elif max(d1) >= shape[2]: error = True
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d1 is not None: output = mean(input[d4,:,:],axis=0); output=mean(output[:,d1],axis=1)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1);totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d1 is not None: output = mean(input[:,:,d1],axis=2)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None: output = mean(input[d4,:,:],axis=0)
    elif dimension == 4:
        if mesharea is None: mesharea=np.ones(shape[[2,3]]) # mesharea=np.random.random_sample(shape[[2,3]])*5. + 2. # for testing
        if max(d4) >= shape[0]: error = True
        elif max(d3) >= shape[1]: error = True
        elif max(d2) >= shape[2]: error = True
        elif max(d1) >= shape[3]: error = True
        elif d4 is not None and d3 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0); totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d3 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d3 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            output = mean(output[:,d1],axis=1)
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
            #noperturb = smooth1d(output,window_len=7)
            #lenlen = len(output) ; output = output[1:lenlen-7] ; yeye = noperturb[4:lenlen-4]
            #plot(output) ; plot(yeye) ; show() ; plot(output-yeye) ; show()
        elif d3 is not None and d2 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d4 is not None and d3 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = mean(output[:,:,d1],axis=2)
        elif d3 is not None and d2 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d3 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            output = mean(output[:,:,d1],axis=2)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2); totalarea = mean(totalarea[:,:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2); output = mean(output[:,:,d1],axis=2)/totalarea
        elif d1 is not None: output = mean(input[:,:,:,d1],axis=3)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,output.shape[3]))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2)/totalarea
        elif d3 is not None: output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
        elif d4 is not None: output = mean(input[d4,:,:,:],axis=0)

    if redope is not None:
        if   redope == "mint": output = min(output,axis=0)
        elif redope == "maxt": output = max(output,axis=0)

    dimension2 = np.array(output).ndim
    shape2 = np.array(output).shape
    print 'REDUCEFIELD dim,shape: ',dimension,shape,' >>> ',dimension2,shape2
    return output, error

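## Usage sketch for reducefield() on a 4D (t,z,y,x) field; the index arrays below are hypothetical:
##   field = getfield(nc,'temp')
##   zemap,err = reducefield(field, d4=[0,1,2])        # average over time indices  -> (z,y,x)
##   zesec,err = reducefield(field, d4=[0], d3=[10])   # time + vertical selection  -> (y,x)
## d4,d3,d2,d1 are index arrays along t,z,y,x ; mesharea, if given, weights the horizontal averages.
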
## Author: AC + AS
def reduce_zaxis (input,ax=None,yint=False,vert=None,indice=None):
    from mymath import max,mean
    from scipy import integrate
    import numpy as np
    if yint and vert is not None and indice is not None:
        if type(input).__name__=='MaskedArray':
            input.set_fill_value([np.NaN])
            output = integrate.trapz(input.filled(),x=vert[indice],axis=ax)
        else:
            output = integrate.trapz(input,x=vert[indice],axis=ax)
    else:
        output = mean(input,axis=ax)
    return output

## Author: AS + TN
def definesubplot ( numplot, fig, ipreferline=False):
    from matplotlib.pyplot import rcParams
    rcParams['font.size'] = 12. ## default (important for multiple calls)
    if numplot <= 0:
        subv = 99999
        subh = 99999
    elif numplot == 1:
        subv = 1
        subh = 1
    elif numplot == 2:
        subv = 1 #2
        subh = 2 #1
        fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 3. / 4. )
    elif numplot == 3:
        subv = 3
        subh = 1
        fig.subplots_adjust(hspace = 0.5)
        if ipreferline: subv = 1 ; subh = 3 ; fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot == 4:
        subv = 2
        subh = 2
        #fig.subplots_adjust(wspace = 0.4, hspace = 0.6)
        fig.subplots_adjust(wspace = 0.4, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 2. / 3. )
    elif numplot <= 6:
        subv = 2
        subh = 3
        #fig.subplots_adjust(wspace = 0.4, hspace = 0.0)
        fig.subplots_adjust(wspace = 0.5, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 8:
        subv = 2
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 9:
        subv = 3
        subh = 3
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 12:
        subv = 3
        subh = 4
        fig.subplots_adjust(wspace = 0, hspace = 0.1)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 16:
        subv = 4
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    else:
        print "number of plots supported: 1 to 16"
        exit()
    return subv,subh

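## Usage sketch for definesubplot() (the figure below is hypothetical):
##   import matplotlib.pyplot as plt
##   fig = plt.figure()
##   subv,subh = definesubplot(4, fig)   # -> 2 x 2 panels, font size scaled from 12 to 8
##   ax = fig.add_subplot(subv,subh,1)
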
## Author: AS
def getstralt(nc,nvert):
    varinfile = nc.variables.keys()
    if 'vert' not in varinfile:
        stralt = "_lvl" + str(nvert)
    else:
        zelevel = int(nc.variables['vert'][nvert])
        if abs(zelevel) < 10000.: strheight=str(zelevel)+"m"
        else: strheight=str(int(zelevel/1000.))+"km"
        if 'altitude' in nc.dimensions: stralt = "_"+strheight+"-AMR"
        elif 'altitude_abg' in nc.dimensions: stralt = "_"+strheight+"-ALS"
        elif 'bottom_top' in nc.dimensions: stralt = "_"+strheight
        elif 'pressure' in nc.dimensions: stralt = "_"+str(zelevel)+"Pa"
        else: stralt = ""
    return stralt

## Author: AS
def getlschar ( namefile, getaxis=False ):
    from netCDF4 import Dataset
    from timestuff import sol2ls
    from numpy import array
    from string import rstrip
    import os as daos
    namefiletest = rstrip( rstrip( rstrip( namefile, chars="_z"), chars="_zabg"), chars="_p")
    testexist = daos.path.isfile(namefiletest)
    zetime = None
    if testexist:
        namefile = namefiletest
        #### we assume that wrfout is next to wrfout_z and wrfout_zabg
        nc = Dataset(namefile)
        zetime = None
        days_in_month = [61, 66, 66, 65, 60, 54, 50, 46, 47, 47, 51, 56]
        plus_in_month = [ 0, 61,127,193,258,318,372,422,468,515,562,613]
        if 'Times' in nc.variables:
            zetime = nc.variables['Times'][0]
            shape = array(nc.variables['Times']).shape
            if shape[0] < 2: zetime = None
    if zetime is not None \
       and 'vert' not in nc.variables:
        ##### strangely enough this does not work for api or ncrcat results!
        zesol = plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
        dals = int(sol2ls ( zesol )) #int( 10. * sol2ls ( zesol ) ) / 10.
        ###
        zetime2 = nc.variables['Times'][1]
        one  = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
        next = int(zetime2[11]+zetime2[12]) + int(zetime2[14]+zetime2[15])/37.
        zehour = one
        zehourin = abs ( next - one )
        if not getaxis:
            lschar = "_Ls"+str(dals)
        else:
            zelen = len(nc.variables['Times'][:])
            yeye = range(zelen) ; lsaxis = range(zelen) ; solaxis = range(zelen) ; ltaxis = range(zelen)
            for iii in yeye:
                zetime = nc.variables['Times'][iii]
                ltaxis[iii] = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
                solaxis[iii] = ltaxis[iii] / 24. + plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
                lsaxis[iii] = sol2ls ( solaxis[iii] )
                if ltaxis[iii] < ltaxis[iii-1]: ltaxis[iii] = ltaxis[iii] + 24.
                #print ltaxis[iii], solaxis[iii], lsaxis[iii], getattr( nc, 'JULDAY' )
            lschar = lsaxis ; zehour = solaxis ; zehourin = ltaxis
    else:
        lschar=""
        zehour = 0
        zehourin = 1
    return lschar, zehour, zehourin

## Author: AS
def getprefix (nc):
    prefix = 'LMD_MMM_'
    prefix = prefix + 'd'+str(getattr(nc,'GRID_ID'))+'_'
    prefix = prefix + str(int(getattr(nc,'DX')/1000.))+'km_'
    return prefix

## Author: AS
def getproj (nc):
    typefile = whatkindfile(nc)
    if typefile in ['meso','geo']:
        ### (should CEN_LON be passed to the projection?)
        map_proj = getattr(nc, 'MAP_PROJ')
        cen_lat = getattr(nc, 'CEN_LAT')
        if map_proj == 2:
            if cen_lat > 10.:
                proj="npstere"
                #print "NP stereographic polar domain"
            else:
                proj="spstere"
                #print "SP stereographic polar domain"
        elif map_proj == 1:
            #print "lambert projection domain"
            proj="lcc"
        elif map_proj == 3:
            #print "mercator projection"
            proj="merc"
        else:
            proj="merc"
    elif typefile in ['gcm']: proj="cyl" ## problems with the other projections (traces behind the sphere?)
    else: proj="ortho"
    return proj

## Author: AS
def ptitle (name):
    from matplotlib.pyplot import title
    title(name)
    print name

## Author: AS
def polarinterv (lon2d,lat2d):
    import numpy as np
    wlon = [np.min(lon2d),np.max(lon2d)]
    ind = np.array(lat2d).shape[0] / 2 ## to get a good boundlat and to get the pole
    wlat = [np.min(lat2d[ind,:]),np.max(lat2d[ind,:])]
    return [wlon,wlat]

## Author: AS
def simplinterv (lon2d,lat2d):
    import numpy as np
    return [[np.min(lon2d),np.max(lon2d)],[np.min(lat2d),np.max(lat2d)]]

## Author: AS
def wrfinterv (lon2d,lat2d):
    nx = len(lon2d[0,:])-1
    ny = len(lon2d[:,0])-1
    lon1 = lon2d[0,0]
    lon2 = lon2d[nx,ny]
    lat1 = lat2d[0,0]
    lat2 = lat2d[nx,ny]
    if abs(0.5*(lat1+lat2)) > 60.: wider = 0.5 * (abs(lon1)+abs(lon2)) * 0.1
    else: wider = 0.
    if lon1 < lon2: wlon = [lon1, lon2 + wider]
    else: wlon = [lon2, lon1 + wider]
    if lat1 < lat2: wlat = [lat1, lat2]
    else: wlat = [lat2, lat1]
    return [wlon,wlat]

## Author: AS
def makeplotres (filename,res=None,pad_inches_value=0.25,folder='',disp=True,ext='png',erase=False):
    import matplotlib.pyplot as plt
    from os import system
    addstr = ""
    if res is not None:
        res = int(res)
        addstr = "_"+str(res)
    name = filename+addstr+"."+ext
    if folder != '': name = folder+'/'+name
    plt.savefig(name,dpi=res,bbox_inches='tight',pad_inches=pad_inches_value)
    if disp: display(name)
    if ext in ['eps','ps','svg']: system("tar czvf "+name+".tar.gz "+name+" ; rm -f "+name)
    if erase: system("mv "+name+" to_be_erased")
    return

## Author: AS + AC
def dumpbdy (field,n,stag=None,condition=False,onlyx=False,onlyy=False):
    nx = len(field[0,:])-1
    ny = len(field[:,0])-1
    if condition:
        if stag == 'U': nx = nx-1
        if stag == 'V': ny = ny-1
        if stag == 'W': nx = nx+1 #special LES case when we dump stag on W
    if onlyx: result = field[:,n:nx-n]
    elif onlyy: result = field[n:ny-n,:]
    else: result = field[n:ny-n,n:nx-n]
    return result

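## Example of what dumpbdy() does: for n=5 and a 100x120 (y,x) field,
## the result is field[5:94,5:114], i.e. 5 boundary points dropped on each side
## (with condition=True the staggered dimension is first adjusted).
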
## Author: AS + AC
def getcoorddef ( nc ):
    import numpy as np
    ## getcoord2d for predefined types
    typefile = whatkindfile(nc)
    if typefile in ['meso']:
        if '9999' not in getattr(nc,'START_DATE') :
            ## regular mesoscale
            [lon2d,lat2d] = getcoord2d(nc)
        else:
            ## idealized mesoscale
            nx=getattr(nc,'WEST-EAST_GRID_DIMENSION')
            ny=getattr(nc,'SOUTH-NORTH_GRID_DIMENSION')
            dlat=getattr(nc,'DX')
            ## this is dirty because Martian-specific
            ## ... but this is just intended to get "lat-lon"-like info
            falselon = np.arange(-dlat*(nx-1)/2.,dlat*(nx-1)/2.,dlat)/60000.
            falselat = np.arange(-dlat*(ny-1)/2.,dlat*(ny-1)/2.,dlat)/60000.
            [lon2d,lat2d] = np.meshgrid(falselon,falselat) ## dummy coordinates
            print "WARNING: domain plot artificially centered on lat,lon 0,0"
    elif typefile in ['gcm','earthgcm','ecmwf']:
        #### shouldn't this be nc.variables?
        if "longitude" in nc.dimensions: dalon = "longitude"
        elif "lon" in nc.dimensions: dalon = "lon"
        else: dalon = "nothing"
        if "latitude" in nc.dimensions: dalat = "latitude"
        elif "lat" in nc.dimensions: dalat = "lat"
        else: dalat = "nothing"
        [lon2d,lat2d] = getcoord2d(nc,nlat=dalat,nlon=dalon,is1d=True)
    elif typefile in ['geo']:
        [lon2d,lat2d] = getcoord2d(nc,nlat='XLAT_M',nlon='XLONG_M')
    return lon2d,lat2d

## Author: AS
def getcoord2d (nc,nlat='XLAT',nlon='XLONG',is1d=False):
    import numpy as np
    if nlon == "nothing" or nlat == "nothing":
        print "NO LAT LON FIELDS. I AM TRYING MY BEST. I ASSUME GLOBAL FIELD."
        lon = np.linspace(-180.,180.,getdimfromvar(nc)[-1])
        lat = np.linspace(-90.,90.,getdimfromvar(nc)[-2])
        [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:
        if is1d:
            lat = nc.variables[nlat][:]
            lon = nc.variables[nlon][:]
            [lon2d,lat2d] = np.meshgrid(lon,lat)
        else:
            lat = nc.variables[nlat][0,:,:]
            lon = nc.variables[nlon][0,:,:]
            [lon2d,lat2d] = [lon,lat]
    return lon2d,lat2d

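## Usage sketch: get 2D coordinates whatever the file type (the filename is an assumption):
##   nc = Dataset("diagfi.nc")
##   lon2d,lat2d = getcoorddef(nc)   # dispatches to getcoord2d() with the right variable names
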
## Author: AS
def getdimfromvar (nc):
    varinfile = nc.variables.keys()
    sav = [0.]
    for var in varinfile:
        dim = nc.variables[var].shape ## usually the last variable is 4D or 3D
        if len(dim) > len(sav): sav=dim
    return sav

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def smooth1d(x,window_len=11,window='hanning'):
    """smooth the data using a window with requested size.
    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
    input:
        x: the input signal
        window_len: the dimension of the smoothing window; should be an odd integer
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
            flat window will produce a moving average smoothing.
    output:
        the smoothed signal
    example:
        t=linspace(-2,2,0.1)
        x=sin(t)+randn(len(t))*0.1
        y=smooth(x)
    see also:
        numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
        scipy.signal.lfilter
    TODO: the window parameter could be the window itself if an array instead of a string
    """
    import numpy
    x = numpy.array(x)
    if x.ndim != 1:
        raise ValueError, "smooth only accepts 1 dimension arrays."
    if x.size < window_len:
        raise ValueError, "Input vector needs to be bigger than window size."
    if window_len<3:
        return x
    if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError, "Window must be one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
    s=numpy.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
    #print(len(s))
    if window == 'flat': #moving average
        w=numpy.ones(window_len,'d')
    else:
        w=eval('numpy.'+window+'(window_len)')
    y=numpy.convolve(w/w.sum(),s,mode='valid')
    return y

## Author: AS
def smooth (field, coeff):
    ## actually blur_image could work with different coeff on x and y
    if coeff > 1: result = blur_image(field,int(coeff))
    else: result = field
    return result

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def gauss_kern(size, sizey=None):
    import numpy as np
    # Returns a normalized 2D gauss kernel array for convolutions
    size = int(size)
    if not sizey:
        sizey = size
    else:
        sizey = int(sizey)
    x, y = np.mgrid[-size:size+1, -sizey:sizey+1]
    g = np.exp(-(x**2/float(size)+y**2/float(sizey)))
    return g / g.sum()

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def blur_image(im, n, ny=None) :
    from scipy.signal import convolve
    # blurs the image by convolving with a gaussian kernel of typical size n.
    # The optional keyword argument ny allows for a different size in the y direction.
    g = gauss_kern(n, sizey=ny)
    improc = convolve(im, g, mode='same')
    return improc

## Author: AS
def getwinddef (nc):
    ###
    varinfile = nc.variables.keys()
    if 'Um' in varinfile: [uchar,vchar] = ['Um','Vm'] #; print "this is an API meso file"
    elif 'U' in varinfile: [uchar,vchar] = ['U','V'] #; print "this is a RAW meso file"
    elif 'u' in varinfile: [uchar,vchar] = ['u','v'] #; print "this is a GCM file"
    elif 'vitu' in varinfile: [uchar,vchar] = ['vitu','vitv'] #; print "this is a GCM v5 file"
    elif 'ucomp' in varinfile: [uchar,vchar] = ['ucomp','vcomp']
    ### you can add choices here !
    else: [uchar,vchar] = ['not found','not found']
    ###
    if uchar in ['U']: metwind = False ## geometrical (wrt grid)
    else: metwind = True ## meteorological (zon/mer)
    if metwind is False: print "Not using meteorological winds. You trust the numerical grid as being (x,y)"
    ###
    return uchar,vchar,metwind

## Author: AS
def vectorfield (u, v, x, y, stride=3, scale=15., factor=250., color='black', csmooth=1, key=True):
    ## scale sets the reference vector
    ## factor sets all lengths (including the reference). INCREASE it to get shorter vectors.
    import matplotlib.pyplot as plt
    import numpy as np
    #posx = np.min(x) - np.std(x) / 10.
    #posy = np.min(y) - np.std(y) / 10.
    #posx = np.min(x)
    #posy = np.min(y) - 4.*np.std(y) / 10.
    u = smooth(u,csmooth)
    v = smooth(v,csmooth)
    widthvec = 0.004 #0.003 #0.005 #0.003
    q = plt.quiver( x[::stride,::stride],\
                    y[::stride,::stride],\
                    u[::stride,::stride],\
                    v[::stride,::stride],\
                    angles='xy',color=color,pivot='middle',\
                    scale=factor,width=widthvec )
    if color in ['white','yellow']: kcolor='black'
    else: kcolor=color
    if key: p = plt.quiverkey(q,-0.06,0.98,scale,\
                   str(int(scale)),color=kcolor,labelpos='S',labelsep = 0.03)
    return

## Author: AS
def display (name):
    from os import system
    system("display "+name+" > /dev/null 2> /dev/null &")
    return name

## Author: AS
def findstep (wlon):
    steplon = int((wlon[1]-wlon[0])/4.) #3
    step = 120.
    while step > steplon and step > 15. : step = step / 2.
    if step <= 15.:
        while step > steplon and step > 5. : step = step - 5.
        if step <= 5.:
            while step > steplon and step > 1. : step = step - 1.
            if step <= 1.:
                step = 1.
    return step

## Author: AS
def define_proj (char,wlon,wlat,back=None,blat=None,blon=None):
    from mpl_toolkits.basemap import Basemap
    import numpy as np
    import matplotlib as mpl
    from mymath import max
    meanlon = 0.5*(wlon[0]+wlon[1])
    meanlat = 0.5*(wlat[0]+wlat[1])
    zewidth = np.abs(wlon[0]-wlon[1])*60000.*np.cos(3.14*meanlat/180.)
    zeheight = np.abs(wlat[0]-wlat[1])*60000.
    if blat is None:
        ortholat=meanlat
        if wlat[0] >= 80.: blat = -40.
        elif wlat[1] <= -80.: blat = -40.
        elif wlat[1] >= 0.: blat = wlat[0]
        elif wlat[0] <= 0.: blat = wlat[1]
    else: ortholat=blat
    if blon is None: ortholon=meanlon
    else: ortholon=blon
    h = 50.  ## in km
    radius = 3397200.
    if char == "cyl":       m = Basemap(rsphere=radius,projection='cyl',\
                                llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "moll":    m = Basemap(rsphere=radius,projection='moll',lon_0=meanlon)
    elif char == "ortho":   m = Basemap(rsphere=radius,projection='ortho',lon_0=ortholon,lat_0=ortholat)
    elif char == "lcc":     m = Basemap(rsphere=radius,projection='lcc',lat_1=meanlat,lat_0=meanlat,lon_0=meanlon,\
                                width=zewidth,height=zeheight)
                                #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "npstere": m = Basemap(rsphere=radius,projection='npstere', boundinglat=blat, lon_0=0.)
    elif char == "spstere": m = Basemap(rsphere=radius,projection='spstere', boundinglat=blat, lon_0=180.)
    elif char == "nplaea":  m = Basemap(rsphere=radius,projection='nplaea', boundinglat=wlat[0], lon_0=meanlon)
    elif char == "laea":    m = Basemap(rsphere=radius,projection='laea',lon_0=meanlon,lat_0=meanlat,lat_ts=meanlat,\
                                width=zewidth,height=zeheight)
                                #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "nsper":   m = Basemap(rsphere=radius,projection='nsper',lon_0=meanlon,lat_0=meanlat,satellite_height=h*1000.)
    elif char == "merc":    m = Basemap(rsphere=radius,projection='merc',lat_ts=0.,\
                                llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "geos":    m = Basemap(rsphere=radius,projection='geos',lon_0=meanlon)
    elif char == "robin":   m = Basemap(rsphere=radius,projection='robin',lon_0=0)
    elif char == "cass":
        if zewidth > 60000.: ## approx. more than one degree
            m = Basemap(rsphere=radius,projection='cass',\
                          lon_0=meanlon,lat_0=meanlat,\
                          width=zewidth,height=zeheight)
        else:
            m = Basemap(rsphere=radius,projection='cass',\
                          lon_0=meanlon,lat_0=meanlat,\
                          llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    else: errormess("projection not supported.")
    fontsizemer = int(mpl.rcParams['font.size']*3./4.)
    if zewidth > 60000. or char in ["npstere","spstere","ortho"]:
        if char in ["cyl","lcc","merc","nsper","laea"]: step = findstep(wlon)
        else: step = 10.
        steplon = step*2.
    else:
        print "very small domain !"
        steplon = 0.5
        step = 0.5
    zecolor ='grey'
    zelinewidth = 1
    zelatmax = 80.
    if meanlat > 75.: zelatmax = 90. ; step = step/2. ; steplon = steplon*2.
    # to show gcm grid:
    #zecolor = 'r'
    #zelinewidth = 1
    #step = 180./48.
    #steplon = 360./64.
    #zelatmax = 90. - step/3
    if char not in ["moll","robin"]:
        if wlon[1]-wlon[0] < 2.: ## LOCAL MODE
            m.drawmeridians(np.r_[-1.:1.:0.05], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
            m.drawparallels(np.r_[-1.:1.:0.05], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
        else: ## GLOBAL OR REGIONAL MODE
            m.drawmeridians(np.r_[-360.:360.:steplon], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
            m.drawparallels(np.r_[-180.:180.:step], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
    if back:
        if back not in ["coast","sea"]: m.warpimage(marsmap(back),scale=0.75)
        elif back == "coast": m.drawcoastlines()
        elif back == "sea": m.drawlsmask(land_color='white',ocean_color='aqua')
    #if not back:
    #    if not var: back = "mola" ## if no var: draw mola
    #    elif typefile in ['mesoapi','meso','geo'] \
    #       and proj not in ['merc','lcc','nsper','laea']: back = "molabw" ## if var but meso: draw molabw
    #    else: pass ## else: draw None
    return m

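## Usage sketch for define_proj() (the window values below are hypothetical):
##   wlon = [-180.,180.] ; wlat = [-90.,90.]
##   m = define_proj("cyl",wlon,wlat)   # global cylindrical basemap with the Mars radius
##   x,y = m(lon2d,lat2d)               # project coordinates before contouring on the map
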
## Author: AS
#### temporary test
def putpoints (map,plot):
    #### from http://www.scipy.org/Cookbook/Matplotlib/Maps
    # lat/lon coordinates of points of interest.
    lats = [18.4]
    lons = [-134.0]
    points=['Olympus Mons']
    # compute the native map projection coordinates for these points.
    x,y = map(lons,lats)
    # plot filled circles at the locations of the points.
    map.plot(x,y,'bo')
    # plot the names of those points.
    wherept = 0 #1000 #50000
    for name,xpt,ypt in zip(points,x,y):
        plot.text(xpt+wherept,ypt+wherept,name)
    ## the name does not show up...
    return

## Author: AS
def calculate_bounds(field,vmin=None,vmax=None):
    import numpy as np
    from mymath import max,min,mean
    ind = np.where(field < 9e+35)
    fieldcalc = field[ ind ] # the compact syntax does not work if field is a tuple
    ###
    dev = np.std(fieldcalc)*3.0
    ###
    if vmin is None: zevmin = mean(fieldcalc) - dev
    else: zevmin = vmin
    ###
    if vmax is None: zevmax = mean(fieldcalc) + dev
    else: zevmax = vmax
    if vmin == vmax:
        zevmin = mean(fieldcalc) - dev ### for continuity
        zevmax = mean(fieldcalc) + dev ### for continuity
    ###
    if zevmin < 0. and min(fieldcalc) > 0.: zevmin = 0.
    print "BOUNDS field ", min(fieldcalc), max(fieldcalc), " //// adopted", zevmin, zevmax
    return zevmin, zevmax

## Author: AS
def bounds(what_I_plot,zevmin,zevmax):
    from mymath import max,min,mean
    ### might be convenient to add the missing value in arguments
    #what_I_plot[ what_I_plot < zevmin ] = zevmin#*(1. + 1.e-7)
    if zevmin < 0: what_I_plot[ what_I_plot < zevmin*(1. - 1.e-7) ] = zevmin*(1. - 1.e-7)
    else: what_I_plot[ what_I_plot < zevmin*(1. + 1.e-7) ] = zevmin*(1. + 1.e-7)
    #print "NEW MIN ", min(what_I_plot)
    what_I_plot[ what_I_plot > 9e+35 ] = -9e+35
    what_I_plot[ what_I_plot > zevmax ] = zevmax*(1. - 1.e-7)
    #print "NEW MAX ", max(what_I_plot)
    return what_I_plot

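## How calculate_bounds() and bounds() are meant to work together (illustrative):
##   zevmin,zevmax = calculate_bounds(field)   # mean of the valid points -/+ 3 standard deviations
##   field = bounds(field,zevmin,zevmax)       # clip to those limits so the colormap is fully used
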
[345] | 1013 | ## Author: AS |
---|
[241] | 1014 | def nolow(what_I_plot): |
---|
| 1015 | from mymath import max,min |
---|
| 1016 | lim = 0.15*0.5*(abs(max(what_I_plot))+abs(min(what_I_plot))) |
---|
[392] | 1017 | print "NO PLOT BELOW VALUE ", lim |
---|
[241] | 1018 | what_I_plot [ abs(what_I_plot) < lim ] = 1.e40 |
---|
| 1019 | return what_I_plot |
---|
| 1020 | |
---|
[418] | 1021 | |
---|
| 1022 | ## Author : AC |
---|
| 1023 | def hole_bounds(what_I_plot,zevmin,zevmax): |
---|
| 1024 | import numpy as np |
---|
| 1025 | zi=0 |
---|
| 1026 | for i in what_I_plot: |
---|
| 1027 | zj=0 |
---|
| 1028 | for j in i: |
---|
| 1029 | if ((j < zevmin) or (j > zevmax)):what_I_plot[zi,zj]=np.NaN |
---|
| 1030 | zj=zj+1 |
---|
| 1031 | zi=zi+1 |
---|
| 1032 | |
---|
| 1033 | return what_I_plot |
---|
| 1034 | |
---|
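## Usage sketch (illustrative only): unlike bounds, hole_bounds replaces out-of-range values by NaN,
## so contourf leaves holes instead of saturating the colorbar.
def example_hole_bounds_usage():
    import numpy as np
    field = np.linspace(0., 10., 20).reshape(4,5)
    field = hole_bounds(field, 2., 8.)            # values outside [2,8] are turned into NaN
    return field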
[345] | 1035 | ## Author: AS |
---|
[233] | 1036 | def zoomset (wlon,wlat,zoom): |
---|
| 1037 | dlon = abs(wlon[1]-wlon[0])/2. |
---|
| 1038 | dlat = abs(wlat[1]-wlat[0])/2. |
---|
| 1039 | [wlon,wlat] = [ [wlon[0]+zoom*dlon/100.,wlon[1]-zoom*dlon/100.],\ |
---|
| 1040 | [wlat[0]+zoom*dlat/100.,wlat[1]-zoom*dlat/100.] ] |
---|
[392] | 1041 | print "ZOOM %",zoom,wlon,wlat |
---|
[233] | 1042 | return wlon,wlat |
---|
| 1043 | |
---|
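## Usage sketch (illustrative only): zoomset symmetrically shrinks a lon/lat window by zoom percent
## of each half-width.
def example_zoomset_usage():
    wlon, wlat = [-180., 180.], [-90., 90.]
    wlon, wlat = zoomset(wlon, wlat, 50.)         # keeps the central half of the domain
    return wlon, wlat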
[345] | 1044 | ## Author: AS |
---|
[201] | 1045 | def fmtvar (whichvar="def"): |
---|
[204] | 1046 | fmtvar = { \ |
---|
[502] | 1047 | "MIXED": "%.0f",\ |
---|
| 1048 | "UPDRAFT": "%.0f",\ |
---|
| 1049 | "DOWNDRAFT": "%.0f",\ |
---|
[405] | 1050 | "TK": "%.0f",\ |
---|
[637] | 1051 | "T": "%.0f",\ |
---|
[822] | 1052 | "MARS_TI": "%.0f",\ |
---|
| 1053 | "THERMAL_INERTIA": "%.0f",\ |
---|
[516] | 1054 | #"ZMAX_TH": "%.0f",\ |
---|
| 1055 | #"WSTAR": "%.0f",\ |
---|
[425] | 1056 | # Variables from TES ncdf format |
---|
[363] | 1057 | "T_NADIR_DAY": "%.0f",\ |
---|
[376] | 1058 | "T_NADIR_NIT": "%.0f",\ |
---|
[425] | 1059 | # Variables from tes.py ncdf format |
---|
[398] | 1060 | "TEMP_DAY": "%.0f",\ |
---|
| 1061 | "TEMP_NIGHT": "%.0f",\ |
---|
[425] | 1062 | # Variables from MCS and mcs.py ncdf format |
---|
[427] | 1063 | "DTEMP": "%.0f",\ |
---|
| 1064 | "NTEMP": "%.0f",\ |
---|
| 1065 | "DNUMBINTEMP": "%.0f",\ |
---|
| 1066 | "NNUMBINTEMP": "%.0f",\ |
---|
[425] | 1067 | # other stuff |
---|
[405] | 1068 | "TPOT": "%.0f",\ |
---|
[295] | 1069 | "TSURF": "%.0f",\ |
---|
[817] | 1070 | "TSK": "%.0f",\ |
---|
[612] | 1071 | "U_OUT1": "%.0f",\ |
---|
| 1072 | "T_OUT1": "%.0f",\ |
---|
[204] | 1073 | "def": "%.1e",\ |
---|
| 1074 | "PTOT": "%.0f",\ |
---|
[760] | 1075 | "PSFC": "%.1f",\ |
---|
[204] | 1076 | "HGT": "%.1e",\ |
---|
| 1077 | "USTM": "%.2f",\ |
---|
[225] | 1078 | "HFX": "%.0f",\ |
---|
[232] | 1079 | "ICETOT": "%.1e",\ |
---|
[237] | 1080 | "TAU_ICE": "%.2f",\ |
---|
[451] | 1081 | "TAUICE": "%.2f",\ |
---|
[252] | 1082 | "VMR_ICE": "%.1e",\ |
---|
[345] | 1083 | "MTOT": "%.1f",\ |
---|
[405] | 1084 | "ANOMALY": "%.1f",\ |
---|
[771] | 1085 | "W": "%.2f",\ |
---|
[287] | 1086 | "WMAX_TH": "%.1f",\ |
---|
[562] | 1087 | "WSTAR": "%.1f",\ |
---|
[287] | 1088 | "QSURFICE": "%.0f",\ |
---|
[405] | 1089 | "UM": "%.0f",\ |
---|
[490] | 1090 | "WIND": "%.0f",\ |
---|
[612] | 1091 | "UVMET": "%.0f",\ |
---|
| 1092 | "UV": "%.0f",\ |
---|
[295] | 1093 | "ALBBARE": "%.2f",\ |
---|
[317] | 1094 | "TAU": "%.1f",\ |
---|
[382] | 1095 | "CO2": "%.2f",\ |
---|
[701] | 1096 | "ENFACT": "%.1f",\ |
---|
[771] | 1097 | "QDUST": "%.6f",\ |
---|
[345] | 1098 | #### T.N. |
---|
| 1099 | "TEMP": "%.0f",\ |
---|
| 1100 | "VMR_H2OICE": "%.0f",\ |
---|
| 1101 | "VMR_H2OVAP": "%.0f",\ |
---|
| 1102 | "TAUTES": "%.2f",\ |
---|
| 1103 | "TAUTESAP": "%.2f",\ |
---|
| 1104 | |
---|
[204] | 1105 | } |
---|
[518] | 1106 | if "TSURF" in whichvar: whichvar = "TSURF" |
---|
[204] | 1107 | if whichvar not in fmtvar: |
---|
| 1108 | whichvar = "def" |
---|
| 1109 | return fmtvar[whichvar] |
---|
[201] | 1110 | |
---|
[345] | 1111 | ## Author: AS |
---|
[233] | 1112 | #################################################################################################################### |
---|
| 1113 | ### Colorbars http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps?action=AttachFile&do=get&target=colormaps3.png |
---|
[202] | 1114 | def defcolorb (whichone="def"): |
---|
[204] | 1115 | whichcolorb = { \ |
---|
| 1116 | "def": "spectral",\ |
---|
| 1117 | "HGT": "spectral",\ |
---|
[426] | 1118 | "HGT_M": "spectral",\ |
---|
[405] | 1119 | "TK": "gist_heat",\ |
---|
[425] | 1120 | "TPOT": "Paired",\ |
---|
[295] | 1121 | "TSURF": "RdBu_r",\ |
---|
[817] | 1122 | "TSK": "RdBu_r",\ |
---|
[204] | 1123 | "QH2O": "PuBu",\ |
---|
[871] | 1124 | "PSFC": "RdYlBu_r",\ |
---|
[204] | 1125 | "USTM": "YlOrRd",\ |
---|
[490] | 1126 | "WIND": "YlOrRd",\ |
---|
[363] | 1127 | #"T_nadir_nit": "RdBu_r",\ |
---|
| 1128 | #"T_nadir_day": "RdBu_r",\ |
---|
[225] | 1129 | "HFX": "RdYlBu",\ |
---|
[310] | 1130 | "ICETOT": "YlGnBu_r",\ |
---|
[345] | 1131 | #"MTOT": "PuBu",\ |
---|
| 1132 | "CCNQ": "YlOrBr",\ |
---|
| 1133 | "CCNN": "YlOrBr",\ |
---|
| 1134 | "TEMP": "Jet",\ |
---|
[238] | 1135 | "TAU_ICE": "Blues",\ |
---|
[451] | 1136 | "TAUICE": "Blues",\ |
---|
[252] | 1137 | "VMR_ICE": "Blues",\ |
---|
[241] | 1138 | "W": "jet",\ |
---|
[287] | 1139 | "WMAX_TH": "spectral",\ |
---|
[874] | 1140 | "ANOMALY": "gist_ncar",\ |
---|
| 1141 | #"ANOMALY": "RdBu_r",\ |
---|
[287] | 1142 | "QSURFICE": "hot_r",\ |
---|
[295] | 1143 | "ALBBARE": "spectral",\ |
---|
[317] | 1144 | "TAU": "YlOrBr_r",\ |
---|
[382] | 1145 | "CO2": "YlOrBr_r",\ |
---|
[753] | 1146 | "MIXED": "GnBu",\ |
---|
[345] | 1147 | #### T.N. |
---|
[647] | 1148 | "MTOT": "spectral",\ |
---|
[345] | 1149 | "H2O_ICE_S": "RdBu",\ |
---|
| 1150 | "VMR_H2OICE": "PuBu",\ |
---|
| 1151 | "VMR_H2OVAP": "PuBu",\ |
---|
[453] | 1152 | "WATERCAPTAG": "Blues",\ |
---|
[204] | 1153 | } |
---|
[241] | 1154 | #W --> spectral or jet |
---|
[240] | 1155 | #spectral BrBG RdBu_r |
---|
[392] | 1156 | #print "predefined colorbars" |
---|
[518] | 1157 | if "TSURF" in whichone: whichone = "TSURF" |
---|
[204] | 1158 | if whichone not in whichcolorb: |
---|
| 1159 | whichone = "def" |
---|
| 1160 | return whichcolorb[whichone] |
---|
[202] | 1161 | |
---|
[345] | 1162 | ## Author: AS |
---|
[202] | 1163 | def definecolorvec (whichone="def"): |
---|
| 1164 | whichcolor = { \ |
---|
| 1165 | "def": "black",\ |
---|
| 1166 | "vis": "yellow",\ |
---|
[864] | 1167 | "vishires": "blue",\ |
---|
[202] | 1168 | "molabw": "yellow",\ |
---|
| 1169 | "mola": "black",\ |
---|
| 1170 | "gist_heat": "white",\ |
---|
[865] | 1171 | "hot": "white",\ |
---|
[202] | 1172 | "gist_rainbow": "black",\ |
---|
| 1173 | "spectral": "black",\ |
---|
| 1174 | "gray": "red",\ |
---|
| 1175 | "PuBu": "black",\ |
---|
[721] | 1176 | "titan": "red",\ |
---|
[202] | 1177 | } |
---|
| 1178 | if whichone not in whichcolor: |
---|
| 1179 | whichone = "def" |
---|
| 1180 | return whichcolor[whichone] |
---|
| 1181 | |
---|
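## Usage sketch (illustrative only): the three lookup helpers above are typically used together when
## building a plot; names that are not in the tables silently fall back to the "def" entry.
def example_lookup_usage():
    zeformat = fmtvar("TSURF")                    # -> "%.0f" (any name containing TSURF maps to TSURF)
    zecolorb = defcolorb("TSURF")                 # -> "RdBu_r"
    zecolor  = definecolorvec(zecolorb)           # -> "black" ("RdBu_r" is not listed, so "def" is used)
    return zeformat, zecolorb, zecolor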
[345] | 1182 | ## Author: AS |
---|
[180] | 1183 | def marsmap (whichone="vishires"): |
---|
[233] | 1184 | from os import uname |
---|
| 1185 | mymachine = uname()[1] |
---|
| 1186 | ### not sure about speed-up with this method... looks the same |
---|
[511] | 1187 | if "lmd.jussieu.fr" in mymachine: domain = "/u/aslmd/WWW/maps/" |
---|
| 1188 | elif "aymeric-laptop" in mymachine: domain = "/home/aymeric/Dropbox/Public/" |
---|
| 1189 | else: domain = "http://www.lmd.jussieu.fr/~aslmd/maps/" |
---|
[180] | 1190 | whichlink = { \ |
---|
[233] | 1191 | #"vis": "http://maps.jpl.nasa.gov/pix/mar0kuu2.jpg",\ |
---|
| 1192 | #"vishires": "http://www.lmd.jussieu.fr/~aslmd/maps/MarsMap_2500x1250.jpg",\ |
---|
| 1193 | #"geolocal": "http://dl.dropbox.com/u/11078310/geolocal.jpg",\ |
---|
| 1194 | #"mola": "http://www.lns.cornell.edu/~seb/celestia/mars-mola-2k.jpg",\ |
---|
| 1195 | #"molabw": "http://dl.dropbox.com/u/11078310/MarsElevation_2500x1250.jpg",\ |
---|
[453] | 1196 | "thermalday": domain+"thermalday.jpg",\ |
---|
| 1197 | "thermalnight": domain+"thermalnight.jpg",\ |
---|
| 1198 | "tesalbedo": domain+"tesalbedo.jpg",\ |
---|
[233] | 1199 | "vis": domain+"mar0kuu2.jpg",\ |
---|
| 1200 | "vishires": domain+"MarsMap_2500x1250.jpg",\ |
---|
| 1201 | "geolocal": domain+"geolocal.jpg",\ |
---|
| 1202 | "mola": domain+"mars-mola-2k.jpg",\ |
---|
| 1203 | "molabw": domain+"MarsElevation_2500x1250.jpg",\ |
---|
[238] | 1204 | "clouds": "http://www.johnstonsarchive.net/spaceart/marswcloudmap.jpg",\ |
---|
| 1205 | "jupiter": "http://www.mmedia.is/~bjj/data/jupiter_css/jupiter_css.jpg",\ |
---|
| 1206 | "jupiter_voy": "http://www.mmedia.is/~bjj/data/jupiter/jupiter_vgr2.jpg",\ |
---|
[558] | 1207 | #"bw": domain+"EarthElevation_2500x1250.jpg",\ |
---|
[273] | 1208 | "bw": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg",\ |
---|
| 1209 | "contrast": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg",\ |
---|
| 1210 | "nice": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg",\ |
---|
| 1211 | "blue": "http://eoimages.gsfc.nasa.gov/ve/2430/land_ocean_ice_2048.jpg",\ |
---|
[296] | 1212 | "blueclouds": "http://eoimages.gsfc.nasa.gov/ve/2431/land_ocean_ice_cloud_2048.jpg",\ |
---|
| 1213 | "justclouds": "http://eoimages.gsfc.nasa.gov/ve/2432/cloud_combined_2048.jpg",\ |
---|
[721] | 1214 | "pluto": "http://www.boulder.swri.edu/~buie/pluto/pluto_all.png",\ |
---|
| 1215 | "triton": "http://laps.noaa.gov/albers/sos/neptune/triton/triton_rgb_cyl_www.jpg",\ |
---|
| 1216 | "titan": "http://laps.noaa.gov/albers/sos/saturn/titan/titan_rgb_cyl_www.jpg",\ |
---|
| 1217 | #"titan": "http://laps.noaa.gov/albers/sos/celestia/titan_50.jpg",\ |
---|
| 1218 | "titanuni": "http://maps.jpl.nasa.gov/pix/sat6fss1.jpg",\ |
---|
| 1219 | "venus": "http://laps.noaa.gov/albers/sos/venus/venus4/venus4_rgb_cyl_www.jpg",\ |
---|
| 1220 | "cosmic": "http://laps.noaa.gov/albers/sos/universe/wmap/wmap_rgb_cyl_www.jpg",\ |
---|
[180] | 1221 | } |
---|
[238] | 1222 | ### see http://www.mmedia.is/~bjj/planetary_maps.html |
---|
[180] | 1223 | if whichone not in whichlink: |
---|
| 1224 | print "marsmap: choice not defined... you'll get the default one... " |
---|
| 1225 | whichone = "vishires" |
---|
| 1226 | return whichlink[whichone] |
---|
| 1227 | |
---|
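## Usage sketch (illustrative only, assumes an existing basemap instance m as returned by define_proj):
## marsmap only returns a local path or a URL; the background is actually loaded by warpimage.
#   m.warpimage(marsmap("vishires"), scale=0.75)  # high-resolution visible Mars background
#   m.warpimage(marsmap("molabw"),   scale=0.75)  # MOLA topography in black and white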
[273] | 1228 | #def earthmap (whichone): |
---|
| 1229 | # if whichone == "contrast": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg" |
---|
| 1230 | # elif whichone == "bw": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg" |
---|
| 1231 | # elif whichone == "nice": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg" |
---|
| 1232 | # return whichlink |
---|
[180] | 1233 | |
---|
[345] | 1234 | ## Author: AS |
---|
[241] | 1235 | def latinterv (area="Whole"): |
---|
| 1236 | list = { \ |
---|
| 1237 | "Europe": [[ 20., 80.],[- 50., 50.]],\ |
---|
| 1238 | "Central_America": [[-10., 40.],[ 230., 300.]],\ |
---|
| 1239 | "Africa": [[-20., 50.],[- 50., 50.]],\ |
---|
[273] | 1240 | "Whole": [[-90., 90.],[-180., 180.]],\ |
---|
| 1241 | "Southern_Hemisphere": [[-90., 60.],[-180., 180.]],\ |
---|
| 1242 | "Northern_Hemisphere": [[-60., 90.],[-180., 180.]],\ |
---|
[871] | 1243 | "Tharsis_alt": [[-30., 60.],[ 190., 350.]],\ |
---|
[241] | 1244 | "Tharsis": [[-30., 60.],[-170.,- 10.]],\ |
---|
| 1245 | "Whole_No_High": [[-60., 60.],[-180., 180.]],\ |
---|
| 1246 | "Chryse": [[-60., 60.],[- 60., 60.]],\ |
---|
| 1247 | "North_Pole": [[ 50., 90.],[-180., 180.]],\ |
---|
| 1248 | "Close_North_Pole": [[ 75., 90.],[-180., 180.]],\ |
---|
| 1249 | "Far_South_Pole": [[-90.,-40.],[-180., 180.]],\ |
---|
| 1250 | "South_Pole": [[-90.,-50.],[-180., 180.]],\ |
---|
| 1251 | "Close_South_Pole": [[-90.,-75.],[-180., 180.]],\ |
---|
[637] | 1252 | "Sirenum_Crater_large": [[-46.,-34.],[-166.,-151.]],\ |
---|
| 1253 | "Sirenum_Crater_small": [[-36.,-26.],[-168.,-156.]],\ |
---|
| 1254 | "Rupes": [[ 72., 90.],[-120.,- 20.]],\ |
---|
[865] | 1255 | "Rupes2": [[ 80., 85.],[- 90.,- 60.]],\ |
---|
[721] | 1256 | "Xanadu": [[-40., 20.],[ 40., 120.]],\ |
---|
[781] | 1257 | "Hyperboreae": [[ 80., 87.],[- 70.,- 10.]],\ |
---|
[871] | 1258 | "VM_alt": [[-15., 0.],[ 270., 300.]],\ |
---|
[241] | 1259 | } |
---|
| 1260 | if area not in list: area = "Whole" |
---|
| 1261 | [olat,olon] = list[area] |
---|
| 1262 | return olon,olat |
---|
| 1263 | |
---|
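## Usage sketch (illustrative only): latinterv returns the window as (lon interval, lat interval),
## which is the order expected by define_proj; unknown area names fall back to "Whole".
def example_latinterv_usage():
    wlon, wlat = latinterv("Tharsis")             # -> [-170., -10.] and [-30., 60.]
    wlon2, wlat2 = latinterv("Nowhere")           # -> whole planet
    return wlon, wlat, wlon2, wlat2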
[345] | 1264 | ## Author: TN |
---|
| 1265 | def separatenames (name): |
---|
| 1266 | from numpy import concatenate |
---|
| 1267 | # look for commas in the input name to separate the different names (files, variables, etc.) |
---|
| 1268 | if name is None: |
---|
| 1269 | names = None |
---|
| 1270 | else: |
---|
| 1271 | names = [] |
---|
| 1272 | stop = 0 |
---|
| 1273 | currentname = name |
---|
| 1274 | while stop == 0: |
---|
| 1275 | indexvir = currentname.find(',') |
---|
| 1276 | if indexvir == -1: |
---|
| 1277 | stop = 1 |
---|
| 1278 | name1 = currentname |
---|
| 1279 | else: |
---|
| 1280 | name1 = currentname[0:indexvir] |
---|
| 1281 | names = concatenate((names,[name1])) |
---|
| 1282 | currentname = currentname[indexvir+1:len(currentname)] |
---|
| 1283 | return names |
---|
| 1284 | |
---|
| 1285 | |
---|
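## Usage sketch (illustrative only): separatenames splits a comma-separated string into a list of
## names (returned as a numpy array of strings); None is passed through unchanged.
def example_separatenames_usage():
    names = separatenames("tsurf,ps,icetot")      # -> ['tsurf', 'ps', 'icetot']
    nothing = separatenames(None)                 # -> None
    return names, nothing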
| 1286 | ## Author: TN |
---|
| 1287 | def readslices(saxis): |
---|
| 1288 | from numpy import empty |
---|
| 1289 | if saxis is None: |
---|
| 1290 | zesaxis = None |
---|
| 1291 | else: |
---|
| 1292 | zesaxis = empty((len(saxis),2)) |
---|
| 1293 | for i in range(len(saxis)): |
---|
| 1294 | a = separatenames(saxis[i]) |
---|
| 1295 | if len(a) == 1: |
---|
| 1296 | zesaxis[i,:] = float(a[0]) |
---|
| 1297 | else: |
---|
| 1298 | zesaxis[i,0] = float(a[0]) |
---|
| 1299 | zesaxis[i,1] = float(a[1]) |
---|
| 1300 | |
---|
| 1301 | return zesaxis |
---|
| 1302 | |
---|
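## Usage sketch (illustrative only): readslices turns command-line strings into an array of
## [min,max] pairs; a single value becomes a degenerate [v,v] slice.
def example_readslices_usage():
    zesaxis = readslices(["10", "20,30"])         # -> [[10., 10.], [20., 30.]]
    return zesaxis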
[568] | 1303 | ## Author: TN |
---|
| 1304 | def readdata(data,datatype,coord1,coord2): |
---|
| 1305 | ## Read sparse data |
---|
| 1306 | from numpy import empty |
---|
[572] | 1307 | if datatype == 'txt': |
---|
[568] | 1308 | if len(data[coord1].shape) == 1: |
---|
| 1309 | return data[coord1][:] |
---|
| 1310 | elif len(data[coord1].shape) == 2: |
---|
| 1311 | return data[coord1][:,int(coord2)-1] |
---|
| 1312 | else: |
---|
| 1313 | errormess('error in readdata') |
---|
[572] | 1314 | elif datatype == 'sav': |
---|
[568] | 1315 | return data[coord1][coord2] |
---|
| 1316 | else: |
---|
| 1317 | errormess(datatype+' type is not supported!') |
---|
| 1318 | |
---|
| 1319 | |
---|
[399] | 1320 | ## Author: AS |
---|
[475] | 1321 | def bidimfind(lon2d,lat2d,vlon,vlat,file=None): |
---|
[399] | 1322 | import numpy as np |
---|
[475] | 1323 | import matplotlib.pyplot as mpl |
---|
[399] | 1324 | if vlat is None: array = (lon2d - vlon)**2 |
---|
| 1325 | elif vlon is None: array = (lat2d - vlat)**2 |
---|
| 1326 | else: array = (lon2d - vlon)**2 + (lat2d - vlat)**2 |
---|
| 1327 | idy,idx = np.unravel_index( np.argmin(array), lon2d.shape ) |
---|
| 1328 | if vlon is not None: |
---|
[475] | 1329 | if (np.abs(lon2d[idy,idx]-vlon)) > 5: errormess("longitude not found ",printvar=lon2d) |
---|
[399] | 1330 | if vlat is not None: |
---|
[475] | 1331 | if (np.abs(lat2d[idy,idx]-vlat)) > 5: errormess("latitude not found ",printvar=lat2d) |
---|
| 1332 | if file is not None: |
---|
| 1333 | print idx,idy,lon2d[idy,idx],vlon |
---|
| 1334 | print idx,idy,lat2d[idy,idx],vlat |
---|
| 1335 | var = file.variables["HGT"][:,:,:] |
---|
[489] | 1336 | mpl.contourf(var[0,:,:],30,cmap = mpl.get_cmap(name="Greys_r") ) ; mpl.axis('off') ; mpl.plot(idx,idy,'mx',mew=4.0,ms=20.0) |
---|
[475] | 1337 | mpl.show() |
---|
| 1338 | return idy,idx |
---|
[399] | 1339 | |
---|
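## Usage sketch (illustrative only): bidimfind returns the (j,i) grid indices of the point closest
## to the requested longitude/latitude on 2D coordinate arrays (here a synthetic 5-degree grid).
def example_bidimfind_usage():
    import numpy as np
    lon2d, lat2d = np.meshgrid(np.arange(-180., 181., 5.), np.arange(-90., 91., 5.))
    idy, idx = bidimfind(lon2d, lat2d, -134., 18.)  # closest grid point to Olympus Mons
    return idy, idx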
[345] | 1340 | ## Author: TN |
---|
[399] | 1341 | def getsindex(saxis,index,axis): |
---|
[345] | 1342 | # input : all the requested slices and the index of the one currently treated |
---|
| 1343 | # output : all axis indices to be taken into account when reducing the field |
---|
| 1344 | import numpy as np |
---|
[349] | 1345 | if ( np.array(axis).ndim == 2): |
---|
| 1346 | axis = axis[:,0] |
---|
[345] | 1347 | if saxis is None: |
---|
| 1348 | zeindex = None |
---|
| 1349 | else: |
---|
| 1350 | aaa = int(np.argmin(abs(saxis[index,0] - axis))) |
---|
| 1351 | bbb = int(np.argmin(abs(saxis[index,1] - axis))) |
---|
| 1352 | [imin,imax] = np.sort(np.array([aaa,bbb])) |
---|
| 1353 | zeindex = np.array(range(imax-imin+1))+imin |
---|
| 1354 | # because -180 and 180 are the same point in longitude, |
---|
| 1355 | # we get rid of one for averaging purposes. |
---|
| 1356 | if axis[imin] == -180 and axis[imax] == 180: |
---|
| 1357 | zeindex = zeindex[0:len(zeindex)-1] |
---|
[392] | 1358 | print "INFO: whole longitude averaging asked, so last point is not taken into account." |
---|
[345] | 1359 | return zeindex |
---|
| 1360 | |
---|
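## Usage sketch (illustrative only): getsindex converts one [min,max] slice (as built by readslices)
## into the array of axis indices it covers.
def example_getsindex_usage():
    import numpy as np
    lonaxis = np.arange(-180., 181., 10.)
    saxis = readslices(["-30,30"])                # a single slice from -30 to +30 degrees
    zeindex = getsindex(saxis, 0, lonaxis)        # indices of the axis points inside the slice
    return zeindex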
| 1361 | ## Author: TN |
---|
[763] | 1362 | def define_axis(lon,lat,vert,time,indexlon,indexlat,indexvert,indextime,what_I_plot,dim0,vertmode,redope): |
---|
[345] | 1363 | # Purpose of define_axis is to find x and y axis scales in a smart way |
---|
| 1364 | # x axis priority: 1/time 2/lon 3/lat 4/vertical |
---|
| 1365 | # To be improved !!!... |
---|
| 1366 | from numpy import array,swapaxes |
---|
| 1367 | x = None |
---|
| 1368 | y = None |
---|
| 1369 | count = 0 |
---|
| 1370 | what_I_plot = array(what_I_plot) |
---|
| 1371 | shape = what_I_plot.shape |
---|
[477] | 1372 | if indextime is None and len(time) > 1: |
---|
[392] | 1373 | print "AXIS is time" |
---|
[345] | 1374 | x = time |
---|
| 1375 | count = count+1 |
---|
[763] | 1376 | if indexlon is None and len(lon) > 1 and redope not in ['edge_x1','edge_x2']: |
---|
[392] | 1377 | print "AXIS is lon" |
---|
[345] | 1378 | if count == 0: x = lon |
---|
| 1379 | else: y = lon |
---|
| 1380 | count = count+1 |
---|
[763] | 1381 | if indexlat is None and len(lat) > 1 and redope not in ['edge_y1','edge_y2']: |
---|
[392] | 1382 | print "AXIS is lat" |
---|
[345] | 1383 | if count == 0: x = lat |
---|
| 1384 | else: y = lat |
---|
| 1385 | count = count+1 |
---|
[817] | 1386 | if indexvert is None and len(vert) > 1 and ((dim0 == 4) or (y is None)): |
---|
[392] | 1387 | print "AXIS is vert" |
---|
[345] | 1388 | if vertmode == 0: # vertical axis is as is (GCM grid) |
---|
| 1389 | if count == 0: x=range(len(vert)) |
---|
| 1390 | else: y=range(len(vert)) |
---|
| 1391 | count = count+1 |
---|
| 1392 | else: # vertical axis is in kms |
---|
| 1393 | if count == 0: x = vert |
---|
| 1394 | else: y = vert |
---|
| 1395 | count = count+1 |
---|
| 1396 | x = array(x) |
---|
| 1397 | y = array(y) |
---|
[468] | 1398 | print "CHECK SHAPE: what_I_plot, x, y", what_I_plot.shape, x.shape, y.shape |
---|
[345] | 1399 | if len(shape) == 1: |
---|
[562] | 1400 | if shape[0] != len(x): print "WARNING: shape[0] != len(x). Correcting." ; what_I_plot = what_I_plot[0:len(x)] |
---|
[579] | 1401 | if len(y.shape) > 0: y = () |
---|
[345] | 1402 | elif len(shape) == 2: |
---|
[562] | 1403 | if shape[1] == len(y) and shape[0] == len(x) and shape[0] != shape[1]: |
---|
| 1404 | print "INFO: swapaxes: ",what_I_plot.shape,shape ; what_I_plot = swapaxes(what_I_plot,0,1) |
---|
| 1405 | else: |
---|
| 1406 | if shape[0] != len(y): print "WARNING: shape[0] != len(y). Correcting." ; what_I_plot = what_I_plot[0:len(y),:] |
---|
| 1407 | elif shape[1] != len(x): print "WARNING: shape[1] != len(x). Correcting." ; what_I_plot = what_I_plot[:,0:len(x)] |
---|
| 1408 | elif len(shape) == 3: |
---|
| 1409 | if vertmode < 0: print "not supported. must check array dimensions at some point. not difficult to implement though." |
---|
[345] | 1410 | return what_I_plot,x,y |
---|
[349] | 1411 | |
---|
[763] | 1412 | # Author: TN + AS + AC |
---|
| 1413 | def determineplot(slon, slat, svert, stime, redope): |
---|
[349] | 1414 | nlon = 1 # number of longitudinal slices -- 1 means no slice requested |
---|
| 1415 | nlat = 1 |
---|
| 1416 | nvert = 1 |
---|
| 1417 | ntime = 1 |
---|
| 1418 | nslices = 1 |
---|
| 1419 | if slon is not None: |
---|
[770] | 1420 | length=len(slon[:,0]) |
---|
[763] | 1421 | nslices = nslices*length |
---|
[349] | 1422 | nlon = len(slon) |
---|
| 1423 | if slat is not None: |
---|
[770] | 1424 | length=len(slat[:,0]) |
---|
[763] | 1425 | nslices = nslices*length |
---|
[349] | 1426 | nlat = len(slat) |
---|
| 1427 | if svert is not None: |
---|
[771] | 1428 | length=len(svert[:,0]) |
---|
[763] | 1429 | nslices = nslices*length |
---|
[349] | 1430 | nvert = len(svert) |
---|
| 1431 | if stime is not None: |
---|
[770] | 1432 | length=len(stime[:,0]) |
---|
[763] | 1433 | nslices = nslices*length |
---|
[349] | 1434 | ntime = len(stime) |
---|
| 1435 | #else: |
---|
| 1436 | # nslices = 2 |
---|
| 1437 | mapmode = 0 |
---|
[763] | 1438 | if slon is None and slat is None and redope not in ['edge_x1','edge_x2','edge_y1','edge_y2']: |
---|
[349] | 1439 | mapmode = 1 # in this case we plot a map, with the given projection |
---|
| 1440 | return nlon, nlat, nvert, ntime, mapmode, nslices |
---|
[440] | 1441 | |
---|
[638] | 1442 | ## Author : AS |
---|
| 1443 | def maplatlon( lon,lat,field,\ |
---|
| 1444 | proj="cyl",colorb="jet",ndiv=10,zeback="molabw",trans=0.6,title="",\ |
---|
| 1445 | vecx=None,vecy=None,stride=2 ): |
---|
| 1446 | ### an easy way to map a field over lat/lon grid |
---|
| 1447 | import numpy as np |
---|
| 1448 | import matplotlib.pyplot as mpl |
---|
| 1449 | from matplotlib.cm import get_cmap |
---|
| 1450 | ## get lon and lat in 2D version. get lat/lon intervals |
---|
| 1451 | numdim = len(np.array(lon).shape) |
---|
| 1452 | if numdim == 2: [lon2d,lat2d] = [lon,lat] |
---|
| 1453 | elif numdim == 1: [lon2d,lat2d] = np.meshgrid(lon,lat) |
---|
| 1454 | else: errormess("lon and lat arrays must be 1D or 2D") |
---|
[796] | 1455 | #[wlon,wlat] = latinterv() |
---|
| 1456 | [wlon,wlat] = simplinterv(lon2d,lat2d) |
---|
[638] | 1457 | ## define projection and background. define x and y given the projection |
---|
| 1458 | m = define_proj(proj,wlon,wlat,back=zeback,blat=None,blon=None) |
---|
| 1459 | x, y = m(lon2d, lat2d) |
---|
| 1460 | ## define field. bound field. |
---|
| 1461 | what_I_plot = np.transpose(field) |
---|
| 1462 | zevmin, zevmax = calculate_bounds(what_I_plot) ## vmin=min(what_I_plot_frame), vmax=max(what_I_plot_frame)) |
---|
| 1463 | what_I_plot = bounds(what_I_plot,zevmin,zevmax) |
---|
| 1464 | ## define contour field levels. define color palette |
---|
| 1465 | ticks = ndiv + 1 |
---|
| 1466 | zelevels = np.linspace(zevmin,zevmax,ticks) |
---|
| 1467 | palette = get_cmap(name=colorb) |
---|
| 1468 | ## contour field |
---|
| 1469 | m.contourf( x, y, what_I_plot, zelevels, cmap = palette, alpha = trans ) |
---|
| 1470 | ## draw colorbar |
---|
| 1471 | if proj in ['moll','cyl']: zeorientation="horizontal" ; zepad = 0.07 |
---|
| 1472 | else: zeorientation="vertical" ; zepad = 0.03 |
---|
| 1473 | #daformat = fmtvar(fvar.upper()) |
---|
| 1474 | daformat = "%.0f" |
---|
| 1475 | zecb = mpl.colorbar( fraction=0.05,pad=zepad,format=daformat,orientation=zeorientation,\ |
---|
| 1476 | ticks=np.linspace(zevmin,zevmax,num=min([ticks/2+1,21])),extend='neither',spacing='proportional' ) |
---|
| 1477 | ## give a title |
---|
| 1478 | if zeorientation == "horizontal": zecb.ax.set_xlabel(title) |
---|
| 1479 | else: ptitle(title) |
---|
| 1480 | ## draw vector |
---|
| 1481 | if vecx is not None and vecy is not None: |
---|
| 1482 | [vecx_frame,vecy_frame] = m.rotate_vector( np.transpose(vecx), np.transpose(vecy), lon2d, lat2d ) ## for metwinds |
---|
| 1483 | vectorfield(vecx_frame, vecy_frame, x, y, stride=stride, csmooth=2,\ |
---|
| 1484 | scale=30., factor=500., color=definecolorvec(colorb), key=True) |
---|
| 1485 | ## scale sets the reference vector. factor scales all the lengths (including the reference). INCREASE it to shorten the vectors. |
---|
| 1486 | return |
---|
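## Usage sketch (illustrative only): maplatlon is the quick-look entry point of this module. A
## minimal call on a synthetic field, with 1D lat/lon axes and the field dimensioned (lon, lat)
## as expected by the np.transpose above; zeback=None assumes define_proj accepts an empty background.
def example_maplatlon_usage():
    import numpy as np
    import matplotlib.pyplot as mpl
    lon = np.arange(-180., 181., 10.) ; lat = np.arange(-90., 91., 10.)
    field = np.outer(np.cos(np.pi*lon/180.), np.sin(np.pi*lat/180.))   # shape (len(lon), len(lat))
    maplatlon(lon, lat, field, proj="cyl", colorb="jet", ndiv=10, zeback=None, title="synthetic field")
    mpl.show()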
[754] | 1487 | ## Author : AC |
---|
| 1488 | ## Handles calls to specific computations (e.g. wind norm, enrichment factor...) |
---|
[763] | 1489 | def select_getfield(zvarname=None,znc=None,ztypefile=None,mode=None,ztsat=None,ylon=None,ylat=None,yalt=None,ytime=None,analysis=None): |
---|
[754] | 1490 | import numpy as np ; from mymath import get_tsat # np is needed below for np.NaN |
---|
| 1491 | |
---|
| 1492 | ## Specific variables are described here: |
---|
| 1493 | # for the mesoscale: |
---|
[763] | 1494 | specificname_meso = ['UV','uv','uvmet','slopexy','SLOPEXY','deltat','DELTAT','hodograph','tk','hodograph_2'] |
---|
[754] | 1495 | # for the gcm: |
---|
| 1496 | specificname_gcm = ['enfact'] |
---|
| 1497 | |
---|
| 1498 | ## Check for variable in file: |
---|
| 1499 | if mode == 'check': |
---|
| 1500 | varname = zvarname |
---|
| 1501 | varinfile=znc.variables.keys() |
---|
| 1502 | logical_novarname = zvarname not in znc.variables |
---|
| 1503 | logical_nospecificname_meso = not ((ztypefile in ['meso']) and (zvarname in specificname_meso)) |
---|
| 1504 | logical_nospecificname_gcm = not ((ztypefile in ['gcm']) and (zvarname in specificname_gcm)) |
---|
| 1505 | if ( logical_novarname and logical_nospecificname_meso and logical_nospecificname_gcm ): |
---|
| 1506 | if len(varinfile) == 1: varname = varinfile[0] |
---|
| 1507 | else: varname = False |
---|
| 1508 | ## Return the variable name: |
---|
| 1509 | return varname |
---|
| 1510 | |
---|
| 1511 | ## Get the corresponding variable: |
---|
| 1512 | if mode == 'getvar': |
---|
[763] | 1513 | plot_x = None ; plot_y = None ; |
---|
[754] | 1514 | ### ----------- 1. saturation temperature |
---|
| 1515 | if zvarname in ["temp","t","T_nadir_nit","T_nadir_day","temp_day","temp_night"] and ztsat: |
---|
| 1516 | tt=getfield(znc,zvarname) ; print "computing Tsat-T, I ASSUME Z-AXIS IS PRESSURE" |
---|
| 1517 | if type(tt).__name__=='MaskedArray': tt.set_fill_value([np.NaN]) ; tinput=tt.filled() |
---|
| 1518 | else: tinput=tt |
---|
| 1519 | all_var=get_tsat(yalt,tinput,zlon=ylon,zlat=ylat,zalt=yalt,ztime=ytime) |
---|
| 1520 | ### ----------- 2. wind amplitude |
---|
| 1521 | elif ((zvarname in ['UV','uv','uvmet']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)): |
---|
[763] | 1522 | all_var=windamplitude(znc,'amplitude') |
---|
| 1523 | elif ((zvarname in ['hodograph','hodograph_2']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)): |
---|
| 1524 | plot_x, plot_y = windamplitude(znc,zvarname) |
---|
| 1525 | if plot_x is not None: all_var=plot_x # dummy |
---|
| 1526 | else: all_var=plot_y ; plot_x = None ; plot_y = None # Hodograph type 2 is not 'xy' mode |
---|
[754] | 1527 | elif ((zvarname in ['slopexy','SLOPEXY']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)): |
---|
| 1528 | all_var=slopeamplitude(znc) |
---|
| 1529 | ### ------------ 3. Near surface instability |
---|
| 1530 | elif ((zvarname in ['DELTAT','deltat']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)): |
---|
| 1531 | all_var=deltat0t1(znc) |
---|
| 1532 | ### ------------ 4. Enrichment factor |
---|
| 1533 | elif ((ztypefile in ['gcm']) and (zvarname in ['enfact'])): |
---|
| 1534 | all_var=enrichment_factor(znc,ylon,ylat,ytime) |
---|
[763] | 1535 | ### ------------ 5. teta -> temp |
---|
| 1536 | elif ((ztypefile in ['meso']) and (zvarname in ['tk']) and ('tk' not in znc.variables.keys())): |
---|
| 1537 | all_var=teta_to_tk(znc) |
---|
[754] | 1538 | else: |
---|
| 1539 | ### ----------- 999. Normal case |
---|
| 1540 | all_var = getfield(znc,zvarname) |
---|
[763] | 1541 | if analysis is not None: |
---|
| 1542 | if analysis in ['histo','density','histodensity']: plot_y=all_var ; plot_x = plot_y |
---|
| 1543 | elif analysis == 'fft': plot_y, plot_x = spectrum(all_var,ytime,yalt,ylat,ylon) ; all_var = plot_y |
---|
| 1544 | return all_var, plot_x, plot_y |
---|
| 1545 | |
---|
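## Usage sketch (illustrative only, file and variable names are placeholders): the usual two-pass
## call to select_getfield, first in 'check' mode to validate the variable name, then in 'getvar'
## mode to actually fetch the field.
def example_select_getfield_usage(namefile="wrfout_d01_sample", zvarname="tk"):
    from netCDF4 import Dataset
    znc = Dataset(namefile)
    varname = select_getfield(zvarname=zvarname, znc=znc, ztypefile='meso', mode='check')
    if varname is False: errormess("no variable found in "+namefile)
    all_var, plot_x, plot_y = select_getfield(zvarname=varname, znc=znc, ztypefile='meso',
                                              mode='getvar', ztsat=False)
    return all_var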
| 1546 | # Author : A.C |
---|
| 1547 | # The FFT is deliberately computed before reducefield, because we do not want to compute |
---|
| 1548 | # FFTs on averaged fields (which would kill all the waves). Instead, we take the FFT everywhere |
---|
| 1549 | # (not efficient, but still acceptable) and only then do we average (if the user asks for it). |
---|
| 1550 | def spectrum(var,time,vert,lat,lon): |
---|
| 1551 | import numpy as np |
---|
| 1552 | fft=np.fft.fft(var,axis=1) |
---|
| 1553 | N=len(vert) |
---|
| 1554 | step=(vert[1]-vert[0])*1000. |
---|
| 1555 | print "step is: ",step |
---|
| 1556 | fftfreq=np.fft.fftfreq(N,d=step) |
---|
| 1557 | fftfreq=np.fft.fftshift(fftfreq) # spatial FFT => this is the wavenumber |
---|
| 1558 | fft=np.fft.fftshift(fft) |
---|
| 1559 | fftfreq = 1./fftfreq # => wavelength (div by 0 expected, don't panic) |
---|
| 1560 | fft=np.abs(fft) # => amplitude spectrum |
---|
| 1561 | # fft=np.abs(fft)**2 # => power spectrum |
---|
| 1562 | return fft,fftfreq |
---|
| 1563 | |
---|
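## Usage sketch (illustrative only): spectrum takes the FFT along axis 1, so the variable is
## expected to be ordered (time, vertical, lat, lon) with the vertical axis in km. On a synthetic
## 5 km wave the amplitude should peak near a 5000 m wavelength (wavelengths are returned in metres).
def example_spectrum_usage():
    import numpy as np
    vert = np.arange(0., 50., 0.5)                                     # 100 levels, in km
    wave = np.sin(2.*np.pi*np.arange(100.)/10.)                        # period of 10 levels = 5 km
    var = wave[np.newaxis,:,np.newaxis,np.newaxis] * np.ones((4,100,2,2))
    fft, wavelength = spectrum(var, None, vert, None, None)
    return fft, wavelength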
| 1564 | # Author : A.C. |
---|
| 1565 | # Computes temperature from potential temperature for mesoscale files, without needing the API interpolation tool, i.e. staying on the native vertical grid |
---|
| 1566 | def teta_to_tk(nc): |
---|
| 1567 | import numpy as np |
---|
| 1568 | varinfile = nc.variables.keys() |
---|
| 1569 | p0=610. |
---|
| 1570 | t0=220. |
---|
| 1571 | r_cp=1./3.89419 |
---|
| 1572 | if "T" in varinfile: zteta=getfield(nc,'T') |
---|
| 1573 | else: errormess("you need T in your file.") |
---|
| 1574 | if "PTOT" in varinfile: zptot=getfield(nc,'PTOT') |
---|
| 1575 | else: errormess("you need PTOT in your file.") |
---|
| 1576 | zt=(zteta+t0)*(zptot/p0)**(r_cp) # convert (perturbation) potential temperature to temperature via the Exner function |
---|
| 1577 | return zt |
---|
[792] | 1578 | |
---|
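## Usage sketch (illustrative only, file name is a placeholder): the conversion above is simply
## T = (T' + t0) * (p/p0)**(R/cp) with p0 = 610 Pa, t0 = 220 K and R/cp ~ 1/3.89419, applied on
## the native model levels:
#   from netCDF4 import Dataset
#   znc = Dataset("wrfout_d01_sample")
#   tk = teta_to_tk(znc)                          # temperature (K), no vertical interpolation needed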
| 1579 | # Author : A.C. |
---|
| 1580 | # Find the lon and lat index of the dust devil with the largest pressure gradient |
---|
| 1581 | # Steps : |
---|
| 1582 | # 1/ convert the chosen PSFC frame to an image of the PSFC anomaly with respect to the mean |
---|
| 1583 | # 2/ apply the Sobel operator |
---|
| 1584 | # (The Sobel operator performs a 2-D spatial gradient measurement on an image and so emphasizes regions of high spatial frequency that correspond to edges.) |
---|
| 1585 | # 3/ find the maximum of the resulting field |
---|
| 1586 | # 4/ find the points in a 5 pixel radius around the maximum for which the value of the Sobel transform is greater than half the maximum |
---|
| 1587 | # 5/ define a slab of points encompassing the above selected points, including the potential points 'inside' them (if the above points are a hollow circle for example) |
---|
| 1588 | # 6/ in this slab, find the point at which the surface pressure is minimum |
---|
| 1589 | def find_devil(nc,indextime): |
---|
| 1590 | import numpy as np |
---|
| 1591 | from scipy import ndimage |
---|
| 1592 | from mymath import array2image,image2array |
---|
| 1593 | |
---|
| 1594 | varinfile = nc.variables.keys() |
---|
| 1595 | if "PSFC" not in varinfile: errormess("You need PSFC in your file to find dust devils") |
---|
| 1596 | else: psfc_full=getfield(nc,'PSFC') |
---|
| 1597 | psfc,error=reducefield( psfc_full, d4=indextime) |
---|
| 1598 | psfcim=array2image(1000.*(psfc-psfc.mean())) |
---|
| 1599 | sx = ndimage.sobel(psfcim, axis=0, mode='constant') ; sy = ndimage.sobel(psfcim, axis=1, mode='constant') |
---|
| 1600 | sob = np.hypot(sx, sy) |
---|
| 1601 | zemax=np.max(sob) |
---|
| 1602 | goodvalues = sob[sob >= zemax/2] |
---|
| 1603 | ix = np.in1d(sob.ravel(), goodvalues).reshape(sob.shape) |
---|
| 1604 | idxs,idys=np.where(ix) |
---|
| 1605 | maxvalue = sob[sob == zemax] |
---|
| 1606 | ixmax = np.in1d(sob.ravel(), maxvalue[0]).reshape(sob.shape) |
---|
| 1607 | idxmax,idymax=np.where(ixmax) |
---|
| 1608 | valok=[] |
---|
| 1609 | for i in np.arange(len(idxs)): |
---|
| 1610 | a=np.sqrt((idxmax-idxs[i])**2 + (idymax-idys[i])**2) |
---|
| 1611 | if 0 < a <= 5.*np.sqrt(2.): valok.append(goodvalues[i]) |
---|
| 1612 | ix = np.in1d(sob.ravel(), valok).reshape(sob.shape) |
---|
| 1613 | idxs,idys=np.where(ix) |
---|
| 1614 | hyperslab=psfc[np.min(idxs):np.max(idxs),np.min(idys):np.max(idys)] |
---|
| 1615 | idxsub,idysub=np.where(hyperslab==hyperslab.min()) |
---|
| 1616 | idx=idxsub[0]+np.min(idxs) ; idy=idysub[0]+np.min(idys) |
---|
| 1617 | return np.int(idx),np.int(idy) |
---|
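## Usage sketch (illustrative only, file name is a placeholder): centring a plot on the strongest
## dust devil of a given output time with find_devil.
#   from netCDF4 import Dataset
#   znc = Dataset("wrfout_d01_sample")
#   idx, idy = find_devil(znc, indextime=10)      # grid indices of the deepest pressure minimum
#   psfc = getfield(znc, 'PSFC')
#   print "surface pressure at the devil centre: ", psfc[10, idx, idy]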