## Author: AS
def errormess(text,printvar=None):
    print text
    if printvar is not None: print printvar
    exit()
    return

## Author: AS
def adjust_length (tab, zelen):
    from numpy import ones
    if tab is None:
        outtab = ones(zelen) * -999999
    else:
        if zelen != len(tab):
            print "not enough or too many values... setting the same value for all variables"
            outtab = ones(zelen) * tab[0]
        else:
            outtab = tab
    return outtab

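## Illustrative sketch (not part of the original file): how adjust_length behaves
## for the three possible inputs. The values below are hypothetical.
def _example_adjust_length():
    print adjust_length([210.,220.,230.], 3)   # right length: returned unchanged
    print adjust_length([210.,220.], 3)        # wrong length: first value repeated
    print adjust_length(None, 3)               # nothing given: filled with -999999
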
## Author: AS
def getname(var=False,var2=False,winds=False,anomaly=False):
    if var and winds: basename = var + '_UV'
    elif var: basename = var
    elif winds: basename = 'UV'
    else: errormess("please set at least winds or var")
    if anomaly: basename = 'd' + basename
    if var2: basename = basename + '_' + var2
    return basename

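## Illustrative sketch (not part of the original file): output names produced by
## getname for a few hypothetical requests.
def _example_getname():
    print getname(var="tsurf")                            # tsurf
    print getname(var="tsurf", winds=True, anomaly=True)  # dtsurf_UV
    print getname(var="tsurf", var2="ps")                 # tsurf_ps
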
## Author: AS + AC
def localtime(time,lon,nc): # lon is the mean longitude of the plot, not of the domain. central lon of domain is taken from cen_lon
    import numpy as np
    dt_hour=1.
    start=0.
    if lon is not None:
        if lon[0,1]!=lon[0,0]: mean_lon_plot = 0.5*(lon[0,1]-lon[0,0])
        else: mean_lon_plot=lon[0,0]
    elif hasattr(nc, 'CEN_LON'): mean_lon_plot=getattr(nc, 'CEN_LON')
    else: mean_lon_plot=0.
    if hasattr(nc,'TITLE'):
        title=getattr(nc, 'TITLE')
        if hasattr(nc,'DT') and 'MRAMS' in title: dt_hour=getattr(nc, 'DT')/60. # LMD MMM is 1 output/hour (and not 1 output/timestep)
        # MRAMS is 1 output/timestep, unless stride is added in ic.py
        if hasattr(nc,'START_DATE'):
            start_date=getattr(nc, 'START_DATE')
            start_hour=np.float(start_date[11:13])
            start_minute=np.float(start_date[14:16])/60.
            start=start_hour+start_minute # start is the local time of the simu at longitude 0
            ltst = start + time*dt_hour + mean_lon_plot / 15.
        else: ltst = time*dt_hour + mean_lon_plot / 15.
    else: ltst = time*dt_hour + mean_lon_plot / 15.
    ltst = int (ltst * 10) / 10.
    ltst = ltst % 24
    return ltst

## Author: AC
def check_localtime(time):
    a=-1
    print time
    for i in range(len(time)-1):
        if (time[i] > time[i+1]): a=i
    if a >= 0 and a < (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range(a+1):
            time[i]=time[i]-24.
    if a >= 0 and a >= (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs regular axis... recasting"
        for i in range((len(time)-1) - a):
            time[a+1+i]=time[a+1+i]+24.
    return time

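## Illustrative sketch (not part of the original file): recasting a local-time axis
## that wraps around midnight so that it increases monotonically.
def _example_check_localtime():
    print check_localtime([22., 23., 0., 1., 2.])   # becomes [-2., -1., 0., 1., 2.]
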
## Author: AS, AC, JL
def whatkindfile (nc):
    typefile = 'gcm' # default
    if 'controle' in nc.variables: typefile = 'gcm'
    elif 'phisinit' in nc.variables: typefile = 'gcm'
    elif 'phis' in nc.variables: typefile = 'gcm'
    elif 'time_counter' in nc.variables: typefile = 'earthgcm'
    elif hasattr(nc,'START_DATE'): typefile = 'meso'
    elif 'HGT_M' in nc.variables: typefile = 'geo'
    elif hasattr(nc,'institution'):
        if "European Centre" in getattr(nc,'institution'): typefile = 'ecmwf'
    return typefile

## Author: AS
def getfield (nc,var):
    ## this is much faster than simply referring to nc.variables[var]
    import numpy as np
    dimension = len(nc.variables[var].dimensions)
    #print " Opening variable with", dimension, "dimensions ..."
    if dimension == 2: field = nc.variables[var][:,:]
    elif dimension == 3: field = nc.variables[var][:,:,:]
    elif dimension == 4: field = nc.variables[var][:,:,:,:]
    elif dimension == 1: field = nc.variables[var][:]
    # if there are NaNs in the netcdf, they should be loaded as a masked array which will be
    # recast as a regular array later in reducefield
    if (np.isnan(np.sum(field)) and (type(field).__name__ not in 'MaskedArray')):
        print "Warning: netcdf has NaN values but is not loaded as a Masked Array."
        print "recasting array type"
        out=np.ma.masked_invalid(field)
        out.set_fill_value([np.NaN])
    else:
        # missing values from zrecast or hrecast are -1e-33
        masked=np.ma.masked_where(field < -1e30,field)
        masked2=np.ma.masked_where(field > 1e35,field)
        masked.set_fill_value([np.NaN]) ; masked2.set_fill_value([np.NaN])
        mask = np.ma.getmask(masked) ; mask2 = np.ma.getmask(masked2)
        if (True in np.array(mask)):
            out=masked
            print "Masked array... Missing value is NaN"
        elif (True in np.array(mask2)):
            out=masked2
            print "Masked array... Missing value is NaN"
        #else:
        #    # missing values from api are 1e36
        #    masked=np.ma.masked_where(field > 1e35,field)
        #    masked.set_fill_value([np.NaN])
        #    mask = np.ma.getmask(masked)
        #    if (True in np.array(mask)):out=masked
        #    else:out=field
        else:
            # missing values from MRAMS files are 0.100E+32
            masked=np.ma.masked_where(field > 1e30,field)
            masked.set_fill_value([np.NaN])
            mask = np.ma.getmask(masked)
            if (True in np.array(mask)):out=masked
            else:out=field
        #else:out=field
    return out

## Author: AC
# Compute the norm of the winds or return a hodograph
# The corresponding variable to call is UV or uvmet (to use api)
def windamplitude (nc,mode):
    import numpy as np
    varinfile = nc.variables.keys()
    if "U" in varinfile: zu=getfield(nc,'U')
    elif "Um" in varinfile: zu=getfield(nc,'Um')
    else: errormess("you need U or Um in your file.")
    if "V" in varinfile: zv=getfield(nc,'V')
    elif "Vm" in varinfile: zv=getfield(nc,'Vm')
    else: errormess("you need V or Vm in your file.")
    znt,znz,zny,znx = np.array(zu).shape
    if hasattr(nc,'WEST-EAST_PATCH_END_UNSTAG'): znx=getattr(nc, 'WEST-EAST_PATCH_END_UNSTAG')
    zuint = np.zeros([znt,znz,zny,znx])
    zvint = np.zeros([znt,znz,zny,znx])
    if "U" in varinfile:
        if hasattr(nc,'SOUTH-NORTH_PATCH_END_STAG'): zny_stag=getattr(nc, 'SOUTH-NORTH_PATCH_END_STAG')
        if hasattr(nc,'WEST-EAST_PATCH_END_STAG'): znx_stag=getattr(nc, 'WEST-EAST_PATCH_END_STAG')
        if zny_stag == zny: zvint=zv
        else:
            for yy in np.arange(zny): zvint[:,:,yy,:] = (zv[:,:,yy,:] + zv[:,:,yy+1,:])/2.
        if znx_stag == znx: zuint=zu
        else:
            for xx in np.arange(znx): zuint[:,:,:,xx] = (zu[:,:,:,xx] + zu[:,:,:,xx+1])/2.
    else:
        zuint=zu
        zvint=zv
    if mode=='amplitude': return np.sqrt(zuint**2 + zvint**2)
    if mode=='hodograph': return zuint,zvint
    if mode=='hodograph_2': return None, 360.*np.arctan(zvint/zuint)/(2.*np.pi)

## Author: AC
# Compute the enrichment factor of non-condensible gases
# The corresponding variable to call is enfact
# The enrichment factor is computed as in Yuan Lian et al. 2012,
# i.e. you need to have the VL2 site at Ls 135 in your data
# this only requires co2col so that you can use concat.nc at low cost
def enrichment_factor(nc,lon,lat,time):
    import numpy as np
    from myplot import reducefield
    varinfile = nc.variables.keys()
    if "co2col" in varinfile: co2col=getfield(nc,'co2col')
    else: print "error, you need the co2col var in your file"
    if "ps" in varinfile: ps=getfield(nc,'ps')
    else: print "error, you need the ps var in your file"
    dimension = len(nc.variables['co2col'].dimensions)
    if dimension == 2:
        zny,znx = np.array(co2col).shape
        znt=1
    elif dimension == 3: znt,zny,znx = np.array(co2col).shape
    mmrarcol = np.zeros([znt,zny,znx])
    enfact = np.zeros([znt,zny,znx])
    grav=3.72
    mmrarcol[:,:,:] = 1. - grav*co2col[:,:,:]/ps[:,:,:]
    # Computation with reference argon mmr at VL2 Ls 135 (as in Yuan Lian et al. 2012)
    lonvl2=np.zeros([1,2])
    latvl2=np.zeros([1,2])
    timevl2=np.zeros([1,2])
    lonvl2[0,0]=-180
    lonvl2[0,1]=180
    latvl2[:,:]=48.16
    timevl2[:,:]=135.
    indexlon = getsindex(lonvl2,0,lon)
    indexlat = getsindex(latvl2,0,lat)
    indextime = getsindex(timevl2,0,time)
    mmrvl2, error = reducefield( mmrarcol, d4=indextime, d1=indexlon, d2=indexlat)
    print "VL2 Ls 135 mmr arcol:", mmrvl2
    enfact[:,:,:] = mmrarcol[:,:,:]/mmrvl2
    return enfact

## Author: AC
# Compute the norm of the slope angles
# The corresponding variable to call is SLOPEXY
def slopeamplitude (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "slopex" in varinfile: zu=getfield(nc,'slopex')
    elif "SLOPEX" in varinfile: zu=getfield(nc,'SLOPEX')
    else: errormess("you need slopex or SLOPEX in your file.")
    if "slopey" in varinfile: zv=getfield(nc,'slopey')
    elif "SLOPEY" in varinfile: zv=getfield(nc,'SLOPEY')
    else: errormess("you need slopey or SLOPEY in your file.")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zvint = np.zeros([znt,zny,znx])
    zuint=zu
    zvint=zv
    return np.sqrt(zuint**2 + zvint**2)

## Author: AC
# Compute the temperature difference between the surface and the first level.
# API is automatically called to get TSURF and TK.
# The corresponding variable to call is DELTAT
def deltat0t1 (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "tsurf" in varinfile: zu=getfield(nc,'tsurf')
    elif "TSURF" in varinfile: zu=getfield(nc,'TSURF')
    else: errormess("You need tsurf or TSURF in your file")
    if "tk" in varinfile: zv=getfield(nc,'tk')
    elif "TK" in varinfile: zv=getfield(nc,'TK')
    else: errormess("You need tk or TK in your file. (might need to use API. try to add -i 4 -l XXX)")
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zuint=zu - zv[:,0,:,:]
    return zuint

## Author: AS + TN + AC
def reducefield (input,d4=None,d3=None,d2=None,d1=None,yint=False,alt=None,anomaly=False,redope=None,mesharea=None,unidim=999):
    ### we do it the reverse way to be compliant with netcdf "t z y x" or "t y x" or "y x"
    ### it would actually be better to name d4 d3 d2 d1 as t z y x
    ### ... note, anomaly is only computed over d1 and d2 for the moment
    import numpy as np
    from mymath import max,mean,min,sum,getmask
    csmooth = 12 ## a fair amount of grid points (too high results in high computation time)
    if redope is not None:
        if redope == "mint": input = min(input,axis=0) ; d1 = None
        elif redope == "maxt": input = max(input,axis=0) ; d1 = None
        elif redope == "edge_y1": input = input[:,:,0,:] ; d2 = None
        elif redope == "edge_y2": input = input[:,:,-1,:] ; d2 = None
        elif redope == "edge_x1": input = input[:,:,:,0] ; d1 = None
        elif redope == "edge_x2": input = input[:,:,:,-1] ; d1 = None
        else: errormess("not supported. but try lines in reducefield beforehand.")
        #elif redope == "minz": input = min(input,axis=1) ; d2 = None
        #elif redope == "maxz": input = max(input,axis=1) ; d2 = None
        #elif redope == "miny": input = min(input,axis=2) ; d3 = None
        #elif redope == "maxy": input = max(input,axis=2) ; d3 = None
        #elif redope == "minx": input = min(input,axis=3) ; d4 = None
        #elif redope == "maxx": input = max(input,axis=3) ; d4 = None
    dimension = np.array(input).ndim
    shape = np.array(np.array(input).shape)
    #print 'd1,d2,d3,d4: ',d1,d2,d3,d4
    if anomaly: print 'ANOMALY ANOMALY'
    output = input
    error = False
    #### this is needed to cope with the case where d4,d3,d2,d1 are single integers and not arrays
    if d4 is not None and not isinstance(d4, np.ndarray): d4=[d4]
    if d3 is not None and not isinstance(d3, np.ndarray): d3=[d3]
    if d2 is not None and not isinstance(d2, np.ndarray): d2=[d2]
    if d1 is not None and not isinstance(d1, np.ndarray): d1=[d1]
    ### now the main part
    if dimension == 2:
        #### this is needed for 1d-type files (where dim=2 but axes are time-vert and not lat-lon)
        if unidim==1: d2=d4 ; d1=d3 ; d4 = None ; d3 = None
        if mesharea is None: mesharea=np.ones(shape)
        if max(d2) >= shape[0]: error = True
        elif max(d1) >= shape[1]: error = True
        elif d1 is not None and d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d1 is not None: output = mean(input[:,d1],axis=1)
        elif d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
    elif dimension == 3:
        if mesharea is None: mesharea=np.ones(shape[[1,2]])
        if max(d4) >= shape[0]: error = True
        elif max(d2) >= shape[1]: error = True
        elif max(d1) >= shape[2]: error = True
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d1 is not None: output = mean(input[d4,:,:],axis=0); output=mean(output[:,d1],axis=1)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1);totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d1 is not None: output = mean(input[:,:,d1],axis=2)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None: output = mean(input[d4,:,:],axis=0)
    elif dimension == 4:
        if mesharea is None: mesharea=np.ones(shape[[2,3]]) # mesharea=np.random.random_sample(shape[[2,3]])*5. + 2. # for testing
        if max(d4) >= shape[0]: error = True
        elif max(d3) >= shape[1]: error = True
        elif max(d2) >= shape[2]: error = True
        elif max(d1) >= shape[3]: error = True
        elif d4 is not None and d3 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0); totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d3 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d3 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            output = mean(output[:,d1],axis=1)
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
            #noperturb = smooth1d(output,window_len=7)
            #lenlen = len(output) ; output = output[1:lenlen-7] ; yeye = noperturb[4:lenlen-4]
            #plot(output) ; plot(yeye) ; show() ; plot(output-yeye) ; show()
        elif d3 is not None and d2 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d4 is not None and d3 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = mean(output[:,:,d1],axis=2)
        elif d3 is not None and d2 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d3 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            output = mean(output[:,:,d1],axis=2)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2); totalarea = mean(totalarea[:,:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2); output = mean(output[:,:,d1],axis=2)/totalarea
        elif d1 is not None: output = mean(input[:,:,:,d1],axis=3)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,output.shape[3]))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2)/totalarea
        elif d3 is not None: output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
        elif d4 is not None: output = mean(input[d4,:,:,:],axis=0)
    dimension2 = np.array(output).ndim
    shape2 = np.array(output).shape
    print 'REDUCEFIELD dim,shape: ',dimension,shape,' >>> ',dimension2,shape2
    return output, error

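## Illustrative sketch (not part of the original file): area-averaging a hypothetical
## (time, lat, lon) field over all time, latitude and longitude indices. This assumes
## the mymath helpers imported inside reducefield behave as in the rest of this module.
def _example_reducefield():
    import numpy as np
    field = np.arange(24.).reshape(2,3,4)   # dummy (time, lat, lon) values
    value, error = reducefield(field, d4=np.arange(2), d2=np.arange(3), d1=np.arange(4))
    print value, error                      # single mean value, error stays False
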
## Author: AC + AS
def reduce_zaxis (input,ax=None,yint=False,vert=None,indice=None):
    from mymath import max,mean
    from scipy import integrate
    import numpy as np
    if yint and vert is not None and indice is not None:
        if type(input).__name__=='MaskedArray':
            input.set_fill_value([np.NaN])
            output = integrate.trapz(input.filled(),x=vert[indice],axis=ax)
        else:
            output = integrate.trapz(input,x=vert[indice],axis=ax)
    else:
        output = mean(input,axis=ax)
    return output

## Author: AS + TN
def definesubplot ( numplot, fig ):
    from matplotlib.pyplot import rcParams
    rcParams['font.size'] = 12. ## default (important for multiple calls)
    if numplot <= 0:
        subv = 99999
        subh = 99999
    elif numplot == 1:
        subv = 1
        subh = 1
    elif numplot == 2:
        subv = 1 #2
        subh = 2 #1
        fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 3. / 4. )
    elif numplot == 3:
        subv = 3
        subh = 1
        fig.subplots_adjust(hspace = 0.5)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot == 4:
        subv = 2
        subh = 2
        fig.subplots_adjust(wspace = 0.4, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 2. / 3. )
    elif numplot <= 6:
        subv = 2
        subh = 3
        #fig.subplots_adjust(wspace = 0.4, hspace = 0.0)
        fig.subplots_adjust(wspace = 0.5, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 8:
        subv = 2
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 9:
        subv = 3
        subh = 3
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 12:
        subv = 3
        subh = 4
        fig.subplots_adjust(wspace = 0, hspace = 0.1)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 16:
        subv = 4
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    else:
        print "number of plots supported: 1 to 16"
        exit()
    return subv,subh

## Author: AS
def getstralt(nc,nvert):
    varinfile = nc.variables.keys()
    if 'vert' not in varinfile:
        stralt = "_lvl" + str(nvert)
    else:
        zelevel = int(nc.variables['vert'][nvert])
        if abs(zelevel) < 10000.: strheight=str(zelevel)+"m"
        else: strheight=str(int(zelevel/1000.))+"km"
        if 'altitude' in nc.dimensions: stralt = "_"+strheight+"-AMR"
        elif 'altitude_abg' in nc.dimensions: stralt = "_"+strheight+"-ALS"
        elif 'bottom_top' in nc.dimensions: stralt = "_"+strheight
        elif 'pressure' in nc.dimensions: stralt = "_"+str(zelevel)+"Pa"
        else: stralt = ""
    return stralt

## Author: AS
def getlschar ( namefile, getaxis=False ):
    from netCDF4 import Dataset
    from timestuff import sol2ls
    from numpy import array
    from string import rstrip
    import os as daos
    namefiletest = rstrip( rstrip( rstrip( namefile, chars="_z"), chars="_zabg"), chars="_p")
    testexist = daos.path.isfile(namefiletest)
    zetime = None
    if testexist:
        namefile = namefiletest
        #### we assume that wrfout is next to wrfout_z and wrfout_zabg
        nc = Dataset(namefile)
        zetime = None
        days_in_month = [61, 66, 66, 65, 60, 54, 50, 46, 47, 47, 51, 56]
        plus_in_month = [ 0, 61,127,193,258,318,372,422,468,515,562,613]
        if 'Times' in nc.variables:
            zetime = nc.variables['Times'][0]
            shape = array(nc.variables['Times']).shape
            if shape[0] < 2: zetime = None
    if zetime is not None \
       and 'vert' not in nc.variables:
        ##### strangely enough this does not work for api or ncrcat results!
        zesol = plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
        dals = int( 10. * sol2ls ( zesol ) ) / 10.
        ###
        zetime2 = nc.variables['Times'][1]
        one  = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
        next = int(zetime2[11]+zetime2[12]) + int(zetime2[14]+zetime2[15])/37.
        zehour = one
        zehourin = abs ( next - one )
        if not getaxis:
            lschar = "_Ls"+str(dals)
        else:
            zelen = len(nc.variables['Times'][:])
            yeye = range(zelen) ; lsaxis = range(zelen) ; solaxis = range(zelen) ; ltaxis = range(zelen)
            for iii in yeye:
                zetime = nc.variables['Times'][iii]
                ltaxis[iii] = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
                solaxis[iii] = ltaxis[iii] / 24. + plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
                lsaxis[iii] = sol2ls ( solaxis[iii] )
                if ltaxis[iii] < ltaxis[iii-1]: ltaxis[iii] = ltaxis[iii] + 24.
                #print ltaxis[iii], solaxis[iii], lsaxis[iii], getattr( nc, 'JULDAY' )
            lschar = lsaxis ; zehour = solaxis ; zehourin = ltaxis
    else:
        lschar=""
        zehour = 0
        zehourin = 1
    return lschar, zehour, zehourin

## Author: AS
def getprefix (nc):
    prefix = 'LMD_MMM_'
    prefix = prefix + 'd'+str(getattr(nc,'GRID_ID'))+'_'
    prefix = prefix + str(int(getattr(nc,'DX')/1000.))+'km_'
    return prefix

## Author: AS
def getproj (nc):
    typefile = whatkindfile(nc)
    if typefile in ['meso','geo']:
        ### (should CEN_LON be passed to the projection?)
        map_proj = getattr(nc, 'MAP_PROJ')
        cen_lat = getattr(nc, 'CEN_LAT')
        if map_proj == 2:
            if cen_lat > 10.:
                proj="npstere"
                #print "NP stereographic polar domain"
            else:
                proj="spstere"
                #print "SP stereographic polar domain"
        elif map_proj == 1:
            #print "lambert projection domain"
            proj="lcc"
        elif map_proj == 3:
            #print "mercator projection"
            proj="merc"
        else:
            proj="merc"
    elif typefile in ['gcm']: proj="cyl" ## problems with the other projections (traces behind the sphere?)
    else: proj="ortho"
    return proj

## Author: AS
def ptitle (name):
    from matplotlib.pyplot import title
    title(name)
    print name

## Author: AS
def polarinterv (lon2d,lat2d):
    import numpy as np
    wlon = [np.min(lon2d),np.max(lon2d)]
    ind = np.array(lat2d).shape[0] / 2  ## to get a good boundlat and to get the pole
    wlat = [np.min(lat2d[ind,:]),np.max(lat2d[ind,:])]
    return [wlon,wlat]

## Author: AS
def simplinterv (lon2d,lat2d):
    import numpy as np
    return [[np.min(lon2d),np.max(lon2d)],[np.min(lat2d),np.max(lat2d)]]

## Author: AS
def wrfinterv (lon2d,lat2d):
    nx = len(lon2d[0,:])-1
    ny = len(lon2d[:,0])-1
    lon1 = lon2d[0,0]
    lon2 = lon2d[nx,ny]
    lat1 = lat2d[0,0]
    lat2 = lat2d[nx,ny]
    if abs(0.5*(lat1+lat2)) > 60.: wider = 0.5 * (abs(lon1)+abs(lon2)) * 0.1
    else: wider = 0.
    if lon1 < lon2: wlon = [lon1, lon2 + wider]
    else: wlon = [lon2, lon1 + wider]
    if lat1 < lat2: wlat = [lat1, lat2]
    else: wlat = [lat2, lat1]
    return [wlon,wlat]

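## Illustrative sketch (not part of the original file): plotting window returned by
## simplinterv for a hypothetical regular lon/lat grid.
def _example_simplinterv():
    import numpy as np
    lon2d, lat2d = np.meshgrid(np.linspace(-10.,30.,41), np.linspace(20.,60.,41))
    print simplinterv(lon2d, lat2d)   # [[-10.0, 30.0], [20.0, 60.0]]
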
## Author: AS
def makeplotres (filename,res=None,pad_inches_value=0.25,folder='',disp=True,ext='png',erase=False):
    import matplotlib.pyplot as plt
    from os import system
    addstr = ""
    if res is not None:
        res = int(res)
        addstr = "_"+str(res)
    name = filename+addstr+"."+ext
    if folder != '': name = folder+'/'+name
    plt.savefig(name,dpi=res,bbox_inches='tight',pad_inches=pad_inches_value)
    if disp: display(name)
    if ext in ['eps','ps','svg']: system("tar czvf "+name+".tar.gz "+name+" ; rm -f "+name)
    if erase: system("mv "+name+" to_be_erased")
    return

## Author: AS + AC
def dumpbdy (field,n,stag=None,condition=False,onlyx=False,onlyy=False):
    nx = len(field[0,:])-1
    ny = len(field[:,0])-1
    if condition:
        if stag == 'U': nx = nx-1
        if stag == 'V': ny = ny-1
        if stag == 'W': nx = nx+1 # special LES case when we dump stag on W
    if onlyx: result = field[:,n:nx-n]
    elif onlyy: result = field[n:ny-n,:]
    else: result = field[n:ny-n,n:nx-n]
    return result

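## Illustrative sketch (not part of the original file): trimming n points off the
## boundaries of a hypothetical 2D field with dumpbdy.
def _example_dumpbdy():
    import numpy as np
    field = np.zeros((10,12))
    print dumpbdy(field, 2).shape   # interior of the domain, boundaries removed
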
## Author: AS + AC
def getcoorddef ( nc ):
    import numpy as np
    ## getcoord2d for predefined types
    typefile = whatkindfile(nc)
    if typefile in ['meso']:
        if '9999' not in getattr(nc,'START_DATE'):
            ## regular mesoscale
            [lon2d,lat2d] = getcoord2d(nc)
        else:
            ## idealized mesoscale
            nx=getattr(nc,'WEST-EAST_GRID_DIMENSION')
            ny=getattr(nc,'SOUTH-NORTH_GRID_DIMENSION')
            dlat=getattr(nc,'DX')
            ## this is dirty because Martian-specific
            ## ... but this is just intended to get "lat-lon" like info
            falselon = np.arange(-dlat*(nx-1)/2.,dlat*(nx-1)/2.,dlat)/60000.
            falselat = np.arange(-dlat*(ny-1)/2.,dlat*(ny-1)/2.,dlat)/60000.
            [lon2d,lat2d] = np.meshgrid(falselon,falselat) ## dummy coordinates
            print "WARNING: domain plot artificially centered on lat,lon 0,0"
    elif typefile in ['gcm','earthgcm','ecmwf']:
        #### shouldn't this be nc.variables?
        if "longitude" in nc.dimensions: dalon = "longitude"
        elif "lon" in nc.dimensions: dalon = "lon"
        else: dalon = "nothing"
        if "latitude" in nc.dimensions: dalat = "latitude"
        elif "lat" in nc.dimensions: dalat = "lat"
        else: dalat = "nothing"
        [lon2d,lat2d] = getcoord2d(nc,nlat=dalat,nlon=dalon,is1d=True)
    elif typefile in ['geo']:
        [lon2d,lat2d] = getcoord2d(nc,nlat='XLAT_M',nlon='XLONG_M')
    return lon2d,lat2d

## Author: AS
def getcoord2d (nc,nlat='XLAT',nlon='XLONG',is1d=False):
    import numpy as np
    if nlon == "nothing" or nlat == "nothing":
        print "NO LAT LON FIELDS. I AM TRYING MY BEST. I ASSUME GLOBAL FIELD."
        lon = np.linspace(-180.,180.,getdimfromvar(nc)[-1])
        lat = np.linspace(-90.,90.,getdimfromvar(nc)[-2])
        [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:
        if is1d:
            lat = nc.variables[nlat][:]
            lon = nc.variables[nlon][:]
            [lon2d,lat2d] = np.meshgrid(lon,lat)
        else:
            lat = nc.variables[nlat][0,:,:]
            lon = nc.variables[nlon][0,:,:]
            [lon2d,lat2d] = [lon,lat]
    return lon2d,lat2d

## Author: AS
def getdimfromvar (nc):
    varinfile = nc.variables.keys()
    dim = nc.variables[varinfile[-1]].shape ## usually the last variable is 4D or 3D
    return dim

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def smooth1d(x,window_len=11,window='hanning'):
    import numpy
    """smooth the data using a window with requested size.
    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
    input:
        x: the input signal
        window_len: the dimension of the smoothing window; should be an odd integer
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
            flat window will produce a moving average smoothing.
    output:
        the smoothed signal
    example:
        t=linspace(-2,2,0.1)
        x=sin(t)+randn(len(t))*0.1
        y=smooth(x)
    see also:
        numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
        scipy.signal.lfilter
    TODO: the window parameter could be the window itself if an array instead of a string
    """
    x = numpy.array(x)
    if x.ndim != 1:
        raise ValueError, "smooth only accepts 1 dimension arrays."
    if x.size < window_len:
        raise ValueError, "Input vector needs to be bigger than window size."
    if window_len<3:
        return x
    if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError, "Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
    s=numpy.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
    #print(len(s))
    if window == 'flat': #moving average
        w=numpy.ones(window_len,'d')
    else:
        w=eval('numpy.'+window+'(window_len)')
    y=numpy.convolve(w/w.sum(),s,mode='valid')
    return y

## Author: AS
def smooth (field, coeff):
    ## actually blur_image could work with different coeff on x and y
    if coeff > 1: result = blur_image(field,int(coeff))
    else: result = field
    return result

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def gauss_kern(size, sizey=None):
    import numpy as np
    # Returns a normalized 2D gauss kernel array for convolutions
    size = int(size)
    if not sizey:
        sizey = size
    else:
        sizey = int(sizey)
    x, y = np.mgrid[-size:size+1, -sizey:sizey+1]
    g = np.exp(-(x**2/float(size)+y**2/float(sizey)))
    return g / g.sum()

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def blur_image(im, n, ny=None) :
    from scipy.signal import convolve
    # blurs the image by convolving with a gaussian kernel of typical size n.
    # The optional keyword argument ny allows for a different size in the y direction.
    g = gauss_kern(n, sizey=ny)
    improc = convolve(im, g, mode='same')
    return improc

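## Illustrative sketch (not part of the original file): gaussian smoothing of a
## hypothetical noisy 2D field through the smooth/blur_image wrappers above.
def _example_smooth():
    import numpy as np
    field = np.random.rand(50,50)
    smoothed = smooth(field, 5)   # gaussian blur with a kernel of typical size 5
    print field.shape, smoothed.shape
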
## Author: AS
def getwinddef (nc):
    ###
    varinfile = nc.variables.keys()
    if 'Um' in varinfile:     [uchar,vchar] = ['Um','Vm']     #; print "this is API meso file"
    elif 'U' in varinfile:    [uchar,vchar] = ['U','V']       #; print "this is RAW meso file"
    elif 'u' in varinfile:    [uchar,vchar] = ['u','v']       #; print "this is GCM file"
    elif 'vitu' in varinfile: [uchar,vchar] = ['vitu','vitv'] #; print "this is GCM v5 file"
    ### you can add choices here !
    else: [uchar,vchar] = ['not found','not found']
    ###
    if uchar in ['U']: metwind = False ## geometrical (wrt grid)
    else: metwind = True               ## meteorological (zon/mer)
    if metwind is False: print "Not using meteorological winds. You trust numerical grid as being (x,y)"
    ###
    return uchar,vchar,metwind

## Author: AS
def vectorfield (u, v, x, y, stride=3, scale=15., factor=250., color='black', csmooth=1, key=True):
    ## scale sets the reference vector
    ## factor sets all the lengths (including the reference). INCREASE it to shorten the vectors.
    import matplotlib.pyplot as plt
    import numpy as np
    #posx = np.min(x) - np.std(x) / 10.
    #posy = np.min(y) - np.std(y) / 10.
    posx = np.min(x)
    posy = np.min(y) - 4.*np.std(y) / 10.
    u = smooth(u,csmooth)
    v = smooth(v,csmooth)
    widthvec = 0.003 #0.005 #0.003
    q = plt.quiver( x[::stride,::stride],\
                    y[::stride,::stride],\
                    u[::stride,::stride],\
                    v[::stride,::stride],\
                    angles='xy',color=color,pivot='middle',\
                    scale=factor,width=widthvec )
    if color in ['white','yellow']: kcolor='black'
    else: kcolor=color
    if key: p = plt.quiverkey(q,posx,posy,scale,\
                 str(int(scale)),coordinates='data',color=kcolor,labelpos='S',labelsep = 0.03)
    return

## Author: AS
def display (name):
    from os import system
    system("display "+name+" > /dev/null 2> /dev/null &")
    return name

## Author: AS
def findstep (wlon):
    steplon = int((wlon[1]-wlon[0])/4.) #3
    step = 120.
    while step > steplon and step > 15. : step = step / 2.
    if step <= 15.:
        while step > steplon and step > 5. : step = step - 5.
        if step <= 5.:
            while step > steplon and step > 1. : step = step - 1.
            if step <= 1.:
                step = 1.
    return step

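## Illustrative sketch (not part of the original file): grid-line step chosen by
## findstep for a few hypothetical longitude windows.
def _example_findstep():
    print findstep([-180., 180.])   # wide window, coarse step
    print findstep([-10., 10.])     # narrow window, finer step
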
## Author: AS
def define_proj (char,wlon,wlat,back=None,blat=None,blon=None):
    from mpl_toolkits.basemap import Basemap
    import numpy as np
    import matplotlib as mpl
    from mymath import max
    meanlon = 0.5*(wlon[0]+wlon[1])
    meanlat = 0.5*(wlat[0]+wlat[1])
    zewidth = np.abs(wlon[0]-wlon[1])*60000.*np.cos(3.14*meanlat/180.)
    zeheight = np.abs(wlat[0]-wlat[1])*60000.
    if blat is None:
        ortholat=meanlat
        if wlat[0] >= 80.: blat = -40.
        elif wlat[1] <= -80.: blat = -40.
        elif wlat[1] >= 0.: blat = wlat[0]
        elif wlat[0] <= 0.: blat = wlat[1]
    else: ortholat=blat
    if blon is None: ortholon=meanlon
    else: ortholon=blon
    h = 50. ## in km
    radius = 3397200.
    #print meanlat, meanlon
    if char == "cyl": m = Basemap(rsphere=radius,projection='cyl',\
                          llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "moll": m = Basemap(rsphere=radius,projection='moll',lon_0=meanlon)
    elif char == "ortho": m = Basemap(rsphere=radius,projection='ortho',lon_0=ortholon,lat_0=ortholat)
    elif char == "lcc": m = Basemap(rsphere=radius,projection='lcc',lat_1=meanlat,lat_0=meanlat,lon_0=meanlon,\
                          width=zewidth,height=zeheight)
                          #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "npstere": m = Basemap(rsphere=radius,projection='npstere', boundinglat=blat, lon_0=0.)
    elif char == "spstere": m = Basemap(rsphere=radius,projection='spstere', boundinglat=blat, lon_0=180.)
    elif char == "nplaea": m = Basemap(rsphere=radius,projection='nplaea', boundinglat=wlat[0], lon_0=meanlon)
    elif char == "laea": m = Basemap(rsphere=radius,projection='laea',lon_0=meanlon,lat_0=meanlat,lat_ts=meanlat,\
                          width=zewidth,height=zeheight)
                          #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "nsper": m = Basemap(rsphere=radius,projection='nsper',lon_0=meanlon,lat_0=meanlat,satellite_height=h*1000.)
    elif char == "merc": m = Basemap(rsphere=radius,projection='merc',lat_ts=0.,\
                          llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    fontsizemer = int(mpl.rcParams['font.size']*3./4.)
    if char in ["cyl","lcc","merc","nsper","laea"]: step = findstep(wlon)
    else: step = 10.
    steplon = step*2.
    zecolor ='grey'
    zelinewidth = 1
    zelatmax = 80
    if meanlat > 75.: zelatmax = 90. ; step = step/2.
    # to show gcm grid:
    #zecolor = 'r'
    #zelinewidth = 1
    #step = 180./48.
    #steplon = 360./64.
    #zelatmax = 90. - step/3
    if char not in ["moll"]:
        if wlon[1]-wlon[0] < 2.: ## LOCAL MODE
            m.drawmeridians(np.r_[-1.:1.:0.05], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
            m.drawparallels(np.r_[-1.:1.:0.05], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, fmt='%5.2f')
        else: ## GLOBAL OR REGIONAL MODE
            m.drawmeridians(np.r_[-180.:180.:steplon], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
            m.drawparallels(np.r_[-90.:90.:step], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
    if back: m.warpimage(marsmap(back),scale=0.75)
    #if not back:
    #    if not var: back = "mola" ## if no var: draw mola
    #    elif typefile in ['mesoapi','meso','geo'] \
    #       and proj not in ['merc','lcc','nsper','laea']: back = "molabw" ## if var but meso: draw molabw
    #    else: pass ## else: draw None
    return m

## Author: AS
#### temporary test
def putpoints (map,plot):
    #### from http://www.scipy.org/Cookbook/Matplotlib/Maps
    # lat/lon coordinates of a few points of interest.
    lats = [18.4]
    lons = [-134.0]
    points=['Olympus Mons']
    # compute the native map projection coordinates for these points.
    x,y = map(lons,lats)
    # plot filled circles at the locations of the points.
    map.plot(x,y,'bo')
    # plot the names of those points.
    wherept = 0 #1000 #50000
    for name,xpt,ypt in zip(points,x,y):
        plot.text(xpt+wherept,ypt+wherept,name)
    ## the name does not show up...
    return

## Author: AS
def calculate_bounds(field,vmin=None,vmax=None):
    import numpy as np
    from mymath import max,min,mean
    ind = np.where(field < 9e+35)
    fieldcalc = field[ ind ] # the compact syntax does not work if field is a tuple
    ###
    dev = np.std(fieldcalc)*3.0
    ###
    if vmin is None: zevmin = mean(fieldcalc) - dev
    else: zevmin = vmin
    ###
    if vmax is None: zevmax = mean(fieldcalc) + dev
    else: zevmax = vmax
    if vmin == vmax:
        zevmin = mean(fieldcalc) - dev ### for continuity
        zevmax = mean(fieldcalc) + dev ### for continuity
    ###
    if zevmin < 0. and min(fieldcalc) > 0.: zevmin = 0.
    print "BOUNDS field ", min(fieldcalc), max(fieldcalc), " //// adopted", zevmin, zevmax
    return zevmin, zevmax

## Author: AS
def bounds(what_I_plot,zevmin,zevmax):
    from mymath import max,min,mean
    ### might be convenient to add the missing value in arguments
    #what_I_plot[ what_I_plot < zevmin ] = zevmin#*(1. + 1.e-7)
    if zevmin < 0: what_I_plot[ what_I_plot < zevmin*(1. - 1.e-7) ] = zevmin*(1. - 1.e-7)
    else: what_I_plot[ what_I_plot < zevmin*(1. + 1.e-7) ] = zevmin*(1. + 1.e-7)
    #print "NEW MIN ", min(what_I_plot)
    what_I_plot[ what_I_plot > 9e+35 ] = -9e+35
    what_I_plot[ what_I_plot > zevmax ] = zevmax*(1. - 1.e-7)
    #print "NEW MAX ", max(what_I_plot)
    return what_I_plot

## Author: AS
def nolow(what_I_plot):
    from mymath import max,min
    lim = 0.15*0.5*(abs(max(what_I_plot))+abs(min(what_I_plot)))
    print "NO PLOT BELOW VALUE ", lim
    what_I_plot [ abs(what_I_plot) < lim ] = 1.e40
    return what_I_plot


## Author : AC
def hole_bounds(what_I_plot,zevmin,zevmax):
    import numpy as np
    zi=0
    for i in what_I_plot:
        zj=0
        for j in i:
            if ((j < zevmin) or (j > zevmax)): what_I_plot[zi,zj]=np.NaN
            zj=zj+1
        zi=zi+1

    return what_I_plot

## Author: AS
def zoomset (wlon,wlat,zoom):
    dlon = abs(wlon[1]-wlon[0])/2.
    dlat = abs(wlat[1]-wlat[0])/2.
    [wlon,wlat] = [ [wlon[0]+zoom*dlon/100.,wlon[1]-zoom*dlon/100.],\
                    [wlat[0]+zoom*dlat/100.,wlat[1]-zoom*dlat/100.] ]
    print "ZOOM %",zoom,wlon,wlat
    return wlon,wlat

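## Illustrative sketch (not part of the original file): zooming a hypothetical
## global window in by 50% on each side.
def _example_zoomset():
    print zoomset([-180.,180.], [-90.,90.], 50)   # ([-90.0, 90.0], [-45.0, 45.0])
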
## Author: AS
def fmtvar (whichvar="def"):
    fmtvar  =  { \
        "MIXED":        "%.0f",\
        "UPDRAFT":      "%.0f",\
        "DOWNDRAFT":    "%.0f",\
        "TK":           "%.0f",\
        "T":            "%.0f",\
        #"ZMAX_TH":     "%.0f",\
        #"WSTAR":       "%.0f",\
        # Variables from TES ncdf format
        "T_NADIR_DAY":  "%.0f",\
        "T_NADIR_NIT":  "%.0f",\
        # Variables from tes.py ncdf format
        "TEMP_DAY":     "%.0f",\
        "TEMP_NIGHT":   "%.0f",\
        # Variables from MCS and mcs.py ncdf format
        "DTEMP":        "%.0f",\
        "NTEMP":        "%.0f",\
        "DNUMBINTEMP":  "%.0f",\
        "NNUMBINTEMP":  "%.0f",\
        # other stuff
        "TPOT":         "%.0f",\
        "TSURF":        "%.0f",\
        "U_OUT1":       "%.0f",\
        "T_OUT1":       "%.0f",\
        "def":          "%.1e",\
        "PTOT":         "%.0f",\
        "PSFC":         "%.1f",\
        "HGT":          "%.1e",\
        "USTM":         "%.2f",\
        "HFX":          "%.0f",\
        "ICETOT":       "%.1e",\
        "TAU_ICE":      "%.2f",\
        "TAUICE":       "%.2f",\
        "VMR_ICE":      "%.1e",\
        "MTOT":         "%.1f",\
        "ANOMALY":      "%.1f",\
        "W":            "%.1f",\
        "WMAX_TH":      "%.1f",\
        "WSTAR":        "%.1f",\
        "QSURFICE":     "%.0f",\
        "UM":           "%.0f",\
        "WIND":         "%.0f",\
        "UVMET":        "%.0f",\
        "UV":           "%.0f",\
        "ALBBARE":      "%.2f",\
        "TAU":          "%.1f",\
        "CO2":          "%.2f",\
        "ENFACT":       "%.1f",\
        #### T.N.
        "TEMP":         "%.0f",\
        "VMR_H2OICE":   "%.0f",\
        "VMR_H2OVAP":   "%.0f",\
        "TAUTES":       "%.2f",\
        "TAUTESAP":     "%.2f",\
               }
    if "TSURF" in whichvar: whichvar = "TSURF"
    if whichvar not in fmtvar:
        whichvar = "def"
    return fmtvar[whichvar]

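## Behaviour sketch for fmtvar (hypothetical calls; typically invoked with the upper-case
## variable name):
##   fmtvar("TSURF")    -> "%.0f"
##   fmtvar("TSURFMAX") -> "%.0f"   any name containing TSURF is remapped to TSURF
##   fmtvar("FOO")      -> "%.1e"   unknown names fall back to the "def" entry
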
## Author: AS
####################################################################################################################
### Colorbars http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps?action=AttachFile&do=get&target=colormaps3.png
def defcolorb (whichone="def"):
    whichcolorb =  { \
        "def":          "spectral",\
        "HGT":          "spectral",\
        "HGT_M":        "spectral",\
        "TK":           "gist_heat",\
        "TPOT":         "Paired",\
        "TSURF":        "RdBu_r",\
        "QH2O":         "PuBu",\
        "USTM":         "YlOrRd",\
        "WIND":         "YlOrRd",\
        #"T_nadir_nit": "RdBu_r",\
        #"T_nadir_day": "RdBu_r",\
        "HFX":          "RdYlBu",\
        "ICETOT":       "YlGnBu_r",\
        #"MTOT":        "PuBu",\
        "CCNQ":         "YlOrBr",\
        "CCNN":         "YlOrBr",\
        "TEMP":         "jet",\
        "TAU_ICE":      "Blues",\
        "TAUICE":       "Blues",\
        "VMR_ICE":      "Blues",\
        "W":            "jet",\
        "WMAX_TH":      "spectral",\
        "ANOMALY":      "RdBu_r",\
        "QSURFICE":     "hot_r",\
        "ALBBARE":      "spectral",\
        "TAU":          "YlOrBr_r",\
        "CO2":          "YlOrBr_r",\
        "MIXED":        "GnBu",\
        #### T.N.
        "MTOT":         "spectral",\
        "H2O_ICE_S":    "RdBu",\
        "VMR_H2OICE":   "PuBu",\
        "VMR_H2OVAP":   "PuBu",\
        "WATERCAPTAG":  "Blues",\
                   }
    #W --> spectral or jet
    #spectral BrBG RdBu_r
    #print "predefined colorbars"
    if "TSURF" in whichone: whichone = "TSURF"
    if whichone not in whichcolorb:
        whichone = "def"
    return whichcolorb[whichone]

## Author: AS
def definecolorvec (whichone="def"):
    whichcolor =  { \
        "def":          "black",\
        "vis":          "yellow",\
        "vishires":     "yellow",\
        "molabw":       "yellow",\
        "mola":         "black",\
        "gist_heat":    "white",\
        "hot":          "tk",\
        "gist_rainbow": "black",\
        "spectral":     "black",\
        "gray":         "red",\
        "PuBu":         "black",\
        "titan":        "red",\
                  }
    if whichone not in whichcolor:
        whichone = "def"
    return whichcolor[whichone]

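## How the three look-up helpers combine when building a plot (hypothetical case,
## variable "TSURF" drawn over the "mola" background):
##   palette  = defcolorb("TSURF")      # -> "RdBu_r", passed to matplotlib get_cmap
##   veccolor = definecolorvec("mola")  # -> "black", colour of wind vectors on that map
##   daformat = fmtvar("TSURF")         # -> "%.0f", colorbar tick format
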
## Author: AS
def marsmap (whichone="vishires"):
    from os import uname
    mymachine = uname()[1]
    ### not sure about speed-up with this method... looks the same
    if "lmd.jussieu.fr" in mymachine:   domain = "/u/aslmd/WWW/maps/"
    elif "aymeric-laptop" in mymachine: domain = "/home/aymeric/Dropbox/Public/"
    else:                               domain = "http://www.lmd.jussieu.fr/~aslmd/maps/"
    whichlink = { \
        #"vis": "http://maps.jpl.nasa.gov/pix/mar0kuu2.jpg",\
        #"vishires": "http://www.lmd.jussieu.fr/~aslmd/maps/MarsMap_2500x1250.jpg",\
        #"geolocal": "http://dl.dropbox.com/u/11078310/geolocal.jpg",\
        #"mola": "http://www.lns.cornell.edu/~seb/celestia/mars-mola-2k.jpg",\
        #"molabw": "http://dl.dropbox.com/u/11078310/MarsElevation_2500x1250.jpg",\
        "thermalday":   domain+"thermalday.jpg",\
        "thermalnight": domain+"thermalnight.jpg",\
        "tesalbedo":    domain+"tesalbedo.jpg",\
        "vis":          domain+"mar0kuu2.jpg",\
        "vishires":     domain+"MarsMap_2500x1250.jpg",\
        "geolocal":     domain+"geolocal.jpg",\
        "mola":         domain+"mars-mola-2k.jpg",\
        "molabw":       domain+"MarsElevation_2500x1250.jpg",\
        "clouds":       "http://www.johnstonsarchive.net/spaceart/marswcloudmap.jpg",\
        "jupiter":      "http://www.mmedia.is/~bjj/data/jupiter_css/jupiter_css.jpg",\
        "jupiter_voy":  "http://www.mmedia.is/~bjj/data/jupiter/jupiter_vgr2.jpg",\
        #"bw": domain+"EarthElevation_2500x1250.jpg",\
        "bw":           "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg",\
        "contrast":     "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg",\
        "nice":         "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg",\
        "blue":         "http://eoimages.gsfc.nasa.gov/ve/2430/land_ocean_ice_2048.jpg",\
        "blueclouds":   "http://eoimages.gsfc.nasa.gov/ve/2431/land_ocean_ice_cloud_2048.jpg",\
        "justclouds":   "http://eoimages.gsfc.nasa.gov/ve/2432/cloud_combined_2048.jpg",\
        "pluto":        "http://www.boulder.swri.edu/~buie/pluto/pluto_all.png",\
        "triton":       "http://laps.noaa.gov/albers/sos/neptune/triton/triton_rgb_cyl_www.jpg",\
        "titan":        "http://laps.noaa.gov/albers/sos/saturn/titan/titan_rgb_cyl_www.jpg",\
        #"titan": "http://laps.noaa.gov/albers/sos/celestia/titan_50.jpg",\
        "titanuni":     "http://maps.jpl.nasa.gov/pix/sat6fss1.jpg",\
        "venus":        "http://laps.noaa.gov/albers/sos/venus/venus4/venus4_rgb_cyl_www.jpg",\
        "cosmic":       "http://laps.noaa.gov/albers/sos/universe/wmap/wmap_rgb_cyl_www.jpg",\
                }
    ### see http://www.mmedia.is/~bjj/planetary_maps.html
    if whichone not in whichlink:
        print "marsmap: choice not defined... you'll get the default one... "
        whichone = "vishires"
    return whichlink[whichone]

#def earthmap (whichone):
#   if   whichone == "contrast": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg"
#   elif whichone == "bw":       whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg"
#   elif whichone == "nice":     whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg"
#   return whichlink

## Author: AS
def latinterv (area="Whole"):
    list = { \
        "Europe":                [[ 20., 80.],[- 50., 50.]],\
        "Central_America":       [[-10., 40.],[ 230., 300.]],\
        "Africa":                [[-20., 50.],[- 50., 50.]],\
        "Whole":                 [[-90., 90.],[-180., 180.]],\
        "Southern_Hemisphere":   [[-90., 60.],[-180., 180.]],\
        "Northern_Hemisphere":   [[-60., 90.],[-180., 180.]],\
        "Tharsis":               [[-30., 60.],[-170.,- 10.]],\
        "Whole_No_High":         [[-60., 60.],[-180., 180.]],\
        "Chryse":                [[-60., 60.],[- 60., 60.]],\
        "North_Pole":            [[ 50., 90.],[-180., 180.]],\
        "Close_North_Pole":      [[ 75., 90.],[-180., 180.]],\
        "Far_South_Pole":        [[-90.,-40.],[-180., 180.]],\
        "South_Pole":            [[-90.,-50.],[-180., 180.]],\
        "Close_South_Pole":      [[-90.,-75.],[-180., 180.]],\
        "Sirenum_Crater_large":  [[-46.,-34.],[-166.,-151.]],\
        "Sirenum_Crater_small":  [[-36.,-26.],[-168.,-156.]],\
        "Rupes":                 [[ 72., 90.],[-120.,- 20.]],\
        "Xanadu":                [[-40., 20.],[  40., 120.]],\
           }
    if area not in list: area = "Whole"
    [olat,olon] = list[area]
    return olon,olat

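## Each latinterv entry is [[latmin,latmax],[lonmin,lonmax]] in degrees, and the function
## returns them swapped as (olon,olat). For instance (hypothetical call):
##   wlon, wlat = latinterv("North_Pole")   # wlon = [-180.,180.], wlat = [50.,90.]
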
## Author: TN
def separatenames (name):
    from numpy import concatenate
    # look for commas in the input name to separate the different names (files, variables, etc.)
    if name is None:
        names = None
    else:
        names = []
        stop = 0
        currentname = name
        while stop == 0:
            indexvir = currentname.find(',')
            if indexvir == -1:
                stop = 1
                name1 = currentname
            else:
                name1 = currentname[0:indexvir]
            names = concatenate((names,[name1]))
            currentname = currentname[indexvir+1:len(currentname)]
    return names

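## For a plain comma-separated string, separatenames behaves like name.split(',') except that
## a numpy array is returned, e.g. separatenames("u,v,tsurf") yields an array of ['u','v','tsurf'].
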
## Author: TN
def readslices(saxis):
    from numpy import empty
    if saxis is None:
        zesaxis = None
    else:
        zesaxis = empty((len(saxis),2))
        for i in range(len(saxis)):
            a = separatenames(saxis[i])
            if len(a) == 1:
                zesaxis[i,:] = float(a[0])
            else:
                zesaxis[i,0] = float(a[0])
                zesaxis[i,1] = float(a[1])

    return zesaxis

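## Input/output sketch for readslices (hypothetical command-line slice strings):
##   readslices(["10","30,40"])  ->  array([[10.,10.],[30.,40.]])
## i.e. a single value becomes a degenerate interval, a "min,max" pair becomes a real one.
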
## Author: TN
def readdata(data,datatype,coord1,coord2):
    ## Read sparse data
    from numpy import empty
    if datatype == 'txt':
        if len(data[coord1].shape) == 1:
            return data[coord1][:]
        elif len(data[coord1].shape) == 2:
            return data[coord1][:,int(coord2)-1]
        else:
            errormess('error in readdata')
    elif datatype == 'sav':
        return data[coord1][coord2]
    else:
        errormess(datatype+' type is not supported!')

## Author: AS
def bidimfind(lon2d,lat2d,vlon,vlat,file=None):
    import numpy as np
    import matplotlib.pyplot as mpl
    if   vlat is None: array = (lon2d - vlon)**2
    elif vlon is None: array = (lat2d - vlat)**2
    else:              array = (lon2d - vlon)**2 + (lat2d - vlat)**2
    idy,idx = np.unravel_index( np.argmin(array), lon2d.shape )
    if vlon is not None:
        if (np.abs(lon2d[idy,idx]-vlon)) > 5: errormess("longitude not found ",printvar=lon2d)
    if vlat is not None:
        if (np.abs(lat2d[idy,idx]-vlat)) > 5: errormess("latitude not found ",printvar=lat2d)
    if file is not None:
        print idx,idy,lon2d[idy,idx],vlon
        print idx,idy,lat2d[idy,idx],vlat
        var = file.variables["HGT"][:,:,:]
        mpl.contourf(var[0,:,:],30,cmap = mpl.get_cmap(name="Greys_r") ) ; mpl.axis('off') ; mpl.plot(idx,idy,'mx',mew=4.0,ms=20.0)
        mpl.show()
    return idy,idx

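## bidimfind returns the (idy,idx) indices of the 2D grid point closest to the requested
## (vlon,vlat), by minimizing the squared distance in degrees; if the nearest point is more
## than 5 degrees away, errormess aborts. Passing file= additionally plots the point over HGT.
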
## Author: TN
def getsindex(saxis,index,axis):
# input  : all the requested slices and the index of the slice currently treated
# output : all indexes to be taken into account for reducing field
    import numpy as np
    if ( np.array(axis).ndim == 2):
        axis = axis[:,0]
    if saxis is None:
        zeindex = None
    else:
        aaa = int(np.argmin(abs(saxis[index,0] - axis)))
        bbb = int(np.argmin(abs(saxis[index,1] - axis)))
        [imin,imax] = np.sort(np.array([aaa,bbb]))
        zeindex = np.array(range(imax-imin+1))+imin
        # because -180 and 180 are the same point in longitude,
        # we get rid of one for averaging purposes.
        if axis[imin] == -180 and axis[imax] == 180:
            zeindex = zeindex[0:len(zeindex)-1]
            print "INFO: whole longitude averaging asked, so last point is not taken into account."
    return zeindex

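## How readslices and getsindex chain together (hypothetical values): with
##   slon = readslices(["-10,10"]) and a 1D longitude axis lon,
##   getsindex(slon,0,lon) returns the longitude indices spanning -10..10, ready for averaging.
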
## Author: TN
def define_axis(lon,lat,vert,time,indexlon,indexlat,indexvert,indextime,what_I_plot,dim0,vertmode,redope):
# Purpose of define_axis is to find x and y axis scales in a smart way
# x axis priority: 1/time 2/lon 3/lat 4/vertical
# To be improved !!!...
    from numpy import array,swapaxes
    x = None
    y = None
    count = 0
    what_I_plot = array(what_I_plot)
    shape = what_I_plot.shape
    if indextime is None and len(time) > 1:
        print "AXIS is time"
        x = time
        count = count+1
    if indexlon is None and len(lon) > 1 and redope not in ['edge_x1','edge_x2']:
        print "AXIS is lon"
        if count == 0: x = lon
        else:          y = lon
        count = count+1
    if indexlat is None and len(lat) > 1 and redope not in ['edge_y1','edge_y2']:
        print "AXIS is lat"
        if count == 0: x = lat
        else:          y = lat
        count = count+1
    if indexvert is None and ((dim0 == 4) or (y is None)):
        print "AXIS is vert"
        if vertmode == 0: # vertical axis is as is (GCM grid)
            if count == 0: x=range(len(vert))
            else:          y=range(len(vert))
            count = count+1
        else: # vertical axis is in kms
            if count == 0: x = vert
            else:          y = vert
            count = count+1
    x = array(x)
    y = array(y)
    print "CHECK SHAPE: what_I_plot, x, y", what_I_plot.shape, x.shape, y.shape
    if len(shape) == 1:
        if shape[0] != len(x): print "WARNING: shape[0] != len(x). Correcting." ; what_I_plot = what_I_plot[0:len(x)]
        if len(y.shape) > 0:   y = ()
    elif len(shape) == 2:
        if shape[1] == len(y) and shape[0] == len(x) and shape[0] != shape[1]:
            print "INFO: swapaxes: ",what_I_plot.shape,shape ; what_I_plot = swapaxes(what_I_plot,0,1)
        else:
            if   shape[0] != len(y): print "WARNING: shape[0] != len(y). Correcting." ; what_I_plot = what_I_plot[0:len(y),:]
            elif shape[1] != len(x): print "WARNING: shape[1] != len(x). Correcting." ; what_I_plot = what_I_plot[:,0:len(x)]
    elif len(shape) == 3:
        if vertmode < 0: print "not supported. must check array dimensions at some point. not difficult to implement though."
    return what_I_plot,x,y

# Author: TN + AS + AC
def determineplot(slon, slat, svert, stime, redope):
    nlon = 1 # number of longitudinal slices -- 1 is None
    nlat = 1
    nvert = 1
    ntime = 1
    nslices = 1
    if slon is not None:
        length=len(slon[:,0])
        nslices = nslices*length
        nlon = len(slon)
    if slat is not None:
        length=len(slat[:,0])
        nslices = nslices*length
        nlat = len(slat)
    if svert is not None:
        length=len(svert[:,0])
        nslices = nslices*length
        nvert = len(svert)
    if stime is not None:
        length=len(stime[:,0])
        nslices = nslices*length
        ntime = len(stime)
    #else:
    #    nslices = 2
    mapmode = 0
    if slon is None and slat is None and redope not in ['edge_x1','edge_x2','edge_y1','edge_y2']:
        mapmode = 1 # in this case we plot a map, with the given projection
    return nlon, nlat, nvert, ntime, mapmode, nslices

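## Behaviour sketch for determineplot (hypothetical arguments, as returned by readslices):
## with slon = slat = stime = None and svert = readslices(["10"]), it returns
## nlon = nlat = ntime = nvert = nslices = 1 and mapmode = 1, i.e. a single lat/lon map
## is to be drawn at the requested vertical level.
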
## Author : AS
def maplatlon( lon,lat,field,\
               proj="cyl",colorb="jet",ndiv=10,zeback="molabw",trans=0.6,title="",\
               vecx=None,vecy=None,stride=2 ):
    ### an easy way to map a field over a lat/lon grid
    import numpy as np
    import matplotlib.pyplot as mpl
    from matplotlib.cm import get_cmap
    ## get lon and lat in 2D version. get lat/lon intervals
    numdim = len(np.array(lon).shape)
    if   numdim == 2: [lon2d,lat2d] = [lon,lat]
    elif numdim == 1: [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:             errormess("lon and lat arrays must be 1D or 2D")
    [wlon,wlat] = latinterv()
    ## define projection and background. define x and y given the projection
    m = define_proj(proj,wlon,wlat,back=zeback,blat=None,blon=None)
    x, y = m(lon2d, lat2d)
    ## define field. bound field.
    what_I_plot = np.transpose(field)
    zevmin, zevmax = calculate_bounds(what_I_plot)  ## vmin=min(what_I_plot_frame), vmax=max(what_I_plot_frame))
    what_I_plot = bounds(what_I_plot,zevmin,zevmax)
    ## define contour field levels. define color palette
    ticks = ndiv + 1
    zelevels = np.linspace(zevmin,zevmax,ticks)
    palette = get_cmap(name=colorb)
    ## contour field
    m.contourf( x, y, what_I_plot, zelevels, cmap = palette, alpha = trans )
    ## draw colorbar
    if proj in ['moll','cyl']: zeorientation="horizontal" ; zepad = 0.07
    else:                      zeorientation="vertical"   ; zepad = 0.03
    #daformat = fmtvar(fvar.upper())
    daformat = "%.0f"
    zecb = mpl.colorbar( fraction=0.05,pad=zepad,format=daformat,orientation=zeorientation,\
                         ticks=np.linspace(zevmin,zevmax,num=min([ticks/2+1,21])),extend='neither',spacing='proportional' )
    ## give a title
    if zeorientation == "horizontal": zecb.ax.set_xlabel(title)
    else:                             ptitle(title)
    ## draw vector
    if vecx is not None and vecy is not None:
        [vecx_frame,vecy_frame] = m.rotate_vector( np.transpose(vecx), np.transpose(vecy), lon2d, lat2d ) ## for metwinds
        vectorfield(vecx_frame, vecy_frame, x, y, stride=stride, csmooth=2,\
                    scale=30., factor=500., color=definecolorvec(colorb), key=True)
    ## 'scale' sets the reference vector; 'factor' scales all vector lengths (including the
    ## reference): INCREASE it to shorten the vectors.
    return
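## A minimal, hypothetical usage sketch for maplatlon; coordinate and variable names depend on
## the file at hand, and maplatlon transposes the field and chooses bounds/levels internally.
#   import numpy as np
#   lon = np.linspace(-180.,180.,73) ; lat = np.linspace(-90.,90.,37)
#   field = np.random.randn(73,37)    # dummy field dimensioned (lon,lat)
#   maplatlon(lon,lat,field,proj="cyl",colorb=defcolorb("TSURF"),ndiv=20,title="dummy field")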
## Author : AC
## Handles calls to specific computations (e.g. wind norm, enrichment factor...)
def select_getfield(zvarname=None,znc=None,ztypefile=None,mode=None,ztsat=None,ylon=None,ylat=None,yalt=None,ytime=None,analysis=None):
    import numpy as np
    from mymath import get_tsat

    ## Specific variables are described here:
    # for the mesoscale:
    specificname_meso = ['UV','uv','uvmet','slopexy','SLOPEXY','deltat','DELTAT','hodograph','tk','hodograph_2']
    # for the gcm:
    specificname_gcm = ['enfact']

    ## Check for variable in file:
    if mode == 'check':
        varname = zvarname
        varinfile=znc.variables.keys()
        logical_novarname = zvarname not in znc.variables
        logical_nospecificname_meso = not ((ztypefile in ['meso']) and (zvarname in specificname_meso))
        logical_nospecificname_gcm = not ((ztypefile in ['gcm']) and (zvarname in specificname_gcm))
        if ( logical_novarname and logical_nospecificname_meso and logical_nospecificname_gcm ):
            if len(varinfile) == 1: varname = varinfile[0]
            else:                   varname = False
        ## Return the variable name:
        return varname

    ## Get the corresponding variable:
    if mode == 'getvar':
        plot_x = None ; plot_y = None
        ### ----------- 1. saturation temperature
        if zvarname in ["temp","t","T_nadir_nit","T_nadir_day","temp_day","temp_night"] and ztsat:
            tt=getfield(znc,zvarname) ; print "computing Tsat-T, I ASSUME Z-AXIS IS PRESSURE"
            if type(tt).__name__=='MaskedArray': tt.set_fill_value([np.NaN]) ; tinput=tt.filled()
            else:                                tinput=tt
            all_var=get_tsat(yalt,tinput,zlon=ylon,zlat=ylat,zalt=yalt,ztime=ytime)
        ### ----------- 2. wind amplitude
        elif ((zvarname in ['UV','uv','uvmet']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)):
            all_var=windamplitude(znc,'amplitude')
        elif ((zvarname in ['hodograph','hodograph_2']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)):
            plot_x, plot_y = windamplitude(znc,zvarname)
            if plot_x is not None: all_var=plot_x # dummy
            else:                  all_var=plot_y ; plot_x = None ; plot_y = None # Hodograph type 2 is not 'xy' mode
        elif ((zvarname in ['slopexy','SLOPEXY']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)):
            all_var=slopeamplitude(znc)
        ### ------------ 3. Near surface instability
        elif ((zvarname in ['DELTAT','deltat']) and (ztypefile in ['meso']) and (zvarname not in znc.variables)):
            all_var=deltat0t1(znc)
        ### ------------ 4. Enrichment factor
        elif ((ztypefile in ['gcm']) and (zvarname in ['enfact'])):
            all_var=enrichment_factor(znc,ylon,ylat,ytime)
        ### ------------ 5. teta -> temp
        elif ((ztypefile in ['meso']) and (zvarname in ['tk']) and ('tk' not in znc.variables.keys())):
            all_var=teta_to_tk(znc)
        else:
            ### ----------- 999. Normal case
            all_var = getfield(znc,zvarname)
        if analysis is not None:
            if analysis in ['histo','density','histodensity']: plot_y=all_var ; plot_x = plot_y
            elif analysis == 'fft': plot_y, plot_x = spectrum(all_var,ytime,yalt,ylat,ylon) ; all_var = plot_y
        return all_var, plot_x, plot_y

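## The two calling modes of select_getfield, sketched with hypothetical arguments (nc is an
## open netCDF file; lon, lat, vert, time are the coordinate arrays read beforehand):
#   varname = select_getfield(zvarname='tk',znc=nc,ztypefile='meso',mode='check')
#   field, plot_x, plot_y = select_getfield(zvarname=varname,znc=nc,ztypefile='meso',mode='getvar',\
#                                           ztsat=False,ylon=lon,ylat=lat,yalt=vert,ytime=time)
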
# Author : A.C
# The FFT is computed before reducefield on purpose: we do not want to compute FFTs on
# averaged fields (averaging would kill all the waves). Instead, we take the FFT everywhere
# (not efficient, but acceptable) and only then average, if the user asks for it.
def spectrum(var,time,vert,lat,lon):
    import numpy as np
    fft=np.fft.fft(var,axis=1)
    N=len(vert)
    step=(vert[1]-vert[0])*1000.
    print "step is: ",step
    fftfreq=np.fft.fftfreq(N,d=step)
    fftfreq=np.fft.fftshift(fftfreq) # spatial FFT => this is the wavenumber
    fft=np.fft.fftshift(fft)
    fftfreq = 1./fftfreq # => wavelength (div by 0 expected, don't panic)
    fft=np.abs(fft) # => amplitude spectrum
#    fft=np.abs(fft)**2 # => power spectrum
    return fft,fftfreq

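## Note: spectrum assumes the vertical dimension is axis 1 of var and that vert is given in km
## (hence the *1000. step), so the returned fftfreq is a wavelength expressed in metres.
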
# Author : A.C.
# Computes temperature from potential temperature for mesoscale files, without the need to
# use API, i.e. working directly on the model's native vertical grid.
def teta_to_tk(nc):
    import numpy as np
    varinfile = nc.variables.keys()
    p0=610.
    t0=220.
    r_cp=1./3.89419
    if "T" in varinfile: zteta=getfield(nc,'T')
    else: errormess("you need T in your file.")
    if "PTOT" in varinfile: zptot=getfield(nc,'PTOT')
    else: errormess("you need PTOT in your file.")
    zt=(zteta+t0)*(zptot/p0)**(r_cp)
    return zt
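
## The conversion above is the Poisson relation T = theta * (p/p0)**(R/cp) with p0 = 610 Pa and
## R/cp = 1/3.89419; the stored mesoscale variable T is the potential temperature offset by
## t0 = 220 K, which is why t0 is added back before applying the exponent.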
---|