## Author: AS
def errormess(text,printvar=None):
    print text
    if printvar is not None: print printvar
    exit()
    return

## Author: AS
def adjust_length (tab, zelen):
    from numpy import ones
    if tab is None:
        outtab = ones(zelen) * -999999
    else:
        if zelen != len(tab):
            print "not enough or too many values... setting the same value for all variables"
            outtab = ones(zelen) * tab[0]
        else:
            outtab = tab
    return outtab

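## --- illustrative usage sketch (added for documentation; values below are made up) ---
def _example_adjust_length():
    print adjust_length(None, 3)          # [-999999. -999999. -999999.]
    print adjust_length([4., 8.], 3)      # length mismatch --> [ 4.  4.  4.] (after a warning)
    print adjust_length([1., 2., 3.], 3)  # correct length --> returned unchanged
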
## Author: AS
def getname(var=False,var2=False,winds=False,anomaly=False):
    if var and winds: basename = var + '_UV'
    elif var: basename = var
    elif winds: basename = 'UV'
    else: errormess("please set at least winds or var")
    if anomaly: basename = 'd' + basename
    if var2: basename = basename + '_' + var2
    return basename

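## --- illustrative usage sketch (added for documentation; variable names are arbitrary examples) ---
def _example_getname():
    print getname(var='tsurf')                        # 'tsurf'
    print getname(var='tk', winds=True)               # 'tk_UV'
    print getname(var='tk', var2='ps', anomaly=True)  # 'dtk_ps'
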
## Author: AS
def localtime(utc,lon):
    ltst = utc + lon / 15.
    ltst = int (ltst * 10) / 10.
    ltst = ltst % 24
    return ltst

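## --- illustrative usage sketch (added for documentation; the numbers below are made up) ---
## localtime() shifts UTC by lon/15 (15 degrees of longitude per hour) and wraps into 0-24.
def _example_localtime():
    print localtime(12., 0.)     # 12.0 at the reference meridian
    print localtime(12., 90.)    # 18.0 (six hours ahead)
    print localtime(20., -120.)  # 12.0 (eight hours behind, wrapped into 0-24)
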
## Author: AC
def check_localtime(time):
    a=-1
    for i in range(len(time)-1):
        if (time[i] > time[i+1]): a=i
    if a >= 0 and a < (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs a regular axis... recasting"
        for i in range(a+1):
            time[i]=time[i]-24.
    if a >= 0 and a >= (len(time)-1)/2.:
        print "Sorry, time axis is not regular."
        print "Contourf needs a regular axis... recasting"
        for i in range((len(time)-1) - a):
            time[a+1+i]=time[a+1+i]+24.
    return time

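## --- illustrative usage sketch (added for documentation; the axis below is made up) ---
## check_localtime() shifts a local-time axis that wraps around midnight so that it
## increases monotonically (after printing a warning), which contourf needs.
def _example_check_localtime():
    axis = [22., 23., 0., 1., 2., 3.]
    print check_localtime(axis)  # [-2.0, -1.0, 0.0, 1.0, 2.0, 3.0]
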
## Author: AS, AC, JL
def whatkindfile (nc):
    typefile = 'gcm' # default
    if 'controle' in nc.variables: typefile = 'gcm'
    elif 'phisinit' in nc.variables: typefile = 'gcm'
    elif 'phis' in nc.variables: typefile = 'gcm'
    elif 'time_counter' in nc.variables: typefile = 'earthgcm'
    elif hasattr(nc,'START_DATE'): typefile = 'meso'
    elif 'HGT_M' in nc.variables: typefile = 'geo'
    elif hasattr(nc,'institution'):
        if "European Centre" in getattr(nc,'institution'): typefile = 'ecmwf'
    return typefile

## Author: AS
def getfield (nc,var):
    ## this allows us to get the field much faster than simply referring to nc.variables[var]
    import numpy as np
    dimension = len(nc.variables[var].dimensions)
    #print " Opening variable with", dimension, "dimensions ..."
    if dimension == 2: field = nc.variables[var][:,:]
    elif dimension == 3: field = nc.variables[var][:,:,:]
    elif dimension == 4: field = nc.variables[var][:,:,:,:]
    elif dimension == 1: field = nc.variables[var][:]
    # if there are NaNs in the netcdf file, they should be loaded as a masked array which will be
    # recast as a regular array later in reducefield
    if (np.isnan(np.sum(field)) and (type(field).__name__ not in 'MaskedArray')):
        print "Warning: netcdf has nan values but was not loaded as a masked array."
        print "recasting array type"
        out=np.ma.masked_invalid(field)
        out.set_fill_value([np.NaN])
    else:
        # missing values from zrecast or hrecast are -1e-33
        masked=np.ma.masked_where(field < -1e30,field)
        masked2=np.ma.masked_where(field > 1e35,field)
        masked.set_fill_value([np.NaN]) ; masked2.set_fill_value([np.NaN])
        mask = np.ma.getmask(masked) ; mask2 = np.ma.getmask(masked2)
        if (True in np.array(mask)):
            out=masked
            print "Masked array... Missing value is NaN"
        elif (True in np.array(mask2)):
            out=masked2
            print "Masked array... Missing value is NaN"
        # else:
        #     # missing values from api are 1e36
        #     masked=np.ma.masked_where(field > 1e35,field)
        #     masked.set_fill_value([np.NaN])
        #     mask = np.ma.getmask(masked)
        #     if (True in np.array(mask)):out=masked
        #     else:out=field
        else:out=field
    return out

## Author: AC
# Compute the norm of the winds
# The corresponding variable to call is UV or uvmet (to use api)
def windamplitude (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "U" in varinfile: zu=getfield(nc,'U')
    elif "Um" in varinfile: zu=getfield(nc,'Um')
    if "V" in varinfile: zv=getfield(nc,'V')
    elif "Vm" in varinfile: zv=getfield(nc,'Vm')
    znt,znz,zny,znx = np.array(zu).shape
    if "U" in varinfile:znx=znx-1
    zuint = np.zeros([znt,znz,zny,znx])
    zvint = np.zeros([znt,znz,zny,znx])
    if "U" in varinfile:
        for xx in np.arange(znx): zuint[:,:,:,xx] = (zu[:,:,:,xx] + zu[:,:,:,xx+1])/2.
        for yy in np.arange(zny): zvint[:,:,yy,:] = (zv[:,:,yy,:] + zv[:,:,yy+1,:])/2.
    else:
        zuint=zu
        zvint=zv
    return np.sqrt(zuint**2 + zvint**2)

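## --- illustrative sketch (added for documentation; the numbers below are made up) ---
## windamplitude() destaggers U (x-staggered) and V (y-staggered) onto cell centres before
## taking the norm; the lines below show that averaging for one made-up pair of points.
def _example_destagger_norm():
    import numpy as np
    ustag = np.array([2., 4.])         # two x-staggered U points around one cell
    ucent = (ustag[0] + ustag[1])/2.   # 3.0 : destaggered, cell-centred value
    print np.sqrt(ucent**2 + 4.**2)    # 5.0 : norm of a (u,v) = (3,4) wind
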
## Author: AC
# Compute the enrichment factor of non condensible gases
# The corresponding variable to call is enfact
# enrichment factor is computed as in Yuan Lian et al. 2012
# i.e. you need to have the VL2 site at Ls 135 in your data
# this only requires co2col so that you can concatenate files (concat.nc) at low cost
def enrichment_factor(nc,lon,lat,time):
    import numpy as np
    from myplot import reducefield
    varinfile = nc.variables.keys()
    if "co2col" in varinfile: co2col=getfield(nc,'co2col')
    else: errormess("error, you need the co2col variable in your file")
    if "ps" in varinfile: ps=getfield(nc,'ps')
    else: errormess("error, you need the ps variable in your file")
    dimension = len(nc.variables['co2col'].dimensions)
    if dimension == 2:
        zny,znx = np.array(co2col).shape
        znt=1
    elif dimension == 3: znt,zny,znx = np.array(co2col).shape
    mmrarcol = np.zeros([znt,zny,znx])
    enfact = np.zeros([znt,zny,znx])
    grav=3.72
    mmrarcol[:,:,:] = 1. - grav*co2col[:,:,:]/ps[:,:,:]
    # Computation with reference argon mmr at VL2 Ls 135 (as in Yuan Lian et al 2012)
    lonvl2=np.zeros([1,2])
    latvl2=np.zeros([1,2])
    timevl2=np.zeros([1,2])
    lonvl2[0,0]=-180
    lonvl2[0,1]=180
    latvl2[:,:]=48.16
    timevl2[:,:]=135.
    indexlon = getsindex(lonvl2,0,lon)
    indexlat = getsindex(latvl2,0,lat)
    indextime = getsindex(timevl2,0,time)
    mmrvl2, error = reducefield( mmrarcol, d4=indextime, d1=indexlon, d2=indexlat)
    print "VL2 Ls 135 mmr arcol:", mmrvl2
    enfact[:,:,:] = mmrarcol[:,:,:]/mmrvl2
    return enfact

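## --- illustrative sketch (added for documentation; the column values below are made up) ---
## The non-condensible mass mixing ratio used above is mmr = 1 - grav*co2col/ps, later
## normalised by its reference value at VL2 at Ls 135 (Yuan Lian et al. 2012).
def _example_noncondensible_mmr():
    grav = 3.72     # Mars gravity (m s-2), same constant as in enrichment_factor
    co2col = 156.   # made-up CO2 column mass (kg m-2)
    ps = 610.       # made-up surface pressure (Pa)
    print 1. - grav*co2col/ps   # about 0.05, i.e. ~5% of the column is non-condensible
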
## Author: AC
# Compute the norm of the slope angles
# The corresponding variable to call is SLOPEXY
def slopeamplitude (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "slopex" in varinfile: zu=getfield(nc,'slopex')
    elif "SLOPEX" in varinfile: zu=getfield(nc,'SLOPEX')
    if "slopey" in varinfile: zv=getfield(nc,'slopey')
    elif "SLOPEY" in varinfile: zv=getfield(nc,'SLOPEY')
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zvint = np.zeros([znt,zny,znx])
    zuint=zu
    zvint=zv
    return np.sqrt(zuint**2 + zvint**2)

## Author: AC
# Compute the temperature difference between surface and first level.
# API is automatically called to get TSURF and TK.
# The corresponding variable to call is DELTAT
def deltat0t1 (nc):
    import numpy as np
    varinfile = nc.variables.keys()
    if "tsurf" in varinfile: zu=getfield(nc,'tsurf')
    elif "TSURF" in varinfile: zu=getfield(nc,'TSURF')
    if "tk" in varinfile: zv=getfield(nc,'tk')
    elif "TK" in varinfile: zv=getfield(nc,'TK')
    znt,zny,znx = np.array(zu).shape
    zuint = np.zeros([znt,zny,znx])
    zuint=zu - zv[:,0,:,:]
    return zuint

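## --- illustrative sketch (added for documentation; the values below are made up) ---
## deltat0t1() returns tsurf minus the first-level air temperature, so a positive value
## means the surface is warmer than the air just above it.
def _example_deltat():
    tsurf = 245.  # made-up surface temperature (K)
    tk1 = 230.    # made-up first-level air temperature (K)
    print tsurf - tk1  # 15.0 K, a daytime-like unstable surface layer
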
## Author: AS + TN + AC
def reducefield (input,d4=None,d3=None,d2=None,d1=None,yint=False,alt=None,anomaly=False,redope=None,mesharea=None,unidim=999):
    ### we do it the reverse way to be compliant with netcdf "t z y x" or "t y x" or "y x"
    ### it would actually be better to name d4 d3 d2 d1 as t z y x
    ### ... note, anomaly is only computed over d1 and d2 for the moment
    import numpy as np
    from mymath import max,mean,min,sum,getmask
    csmooth = 12 ## a fair amount of grid points (too high results in high computation time)
    if redope is not None:
        if redope == "mint": input = min(input,axis=0) ; d1 = None
        elif redope == "maxt": input = max(input,axis=0) ; d1 = None
        else: errormess("not supported. but try lines in reducefield beforehand.")
        #elif redope == "minz": input = min(input,axis=1) ; d2 = None
        #elif redope == "maxz": input = max(input,axis=1) ; d2 = None
        #elif redope == "miny": input = min(input,axis=2) ; d3 = None
        #elif redope == "maxy": input = max(input,axis=2) ; d3 = None
        #elif redope == "minx": input = min(input,axis=3) ; d4 = None
        #elif redope == "maxx": input = max(input,axis=3) ; d4 = None
    dimension = np.array(input).ndim
    shape = np.array(np.array(input).shape)
    #print 'd1,d2,d3,d4: ',d1,d2,d3,d4
    if anomaly: print 'ANOMALY ANOMALY'
    output = input
    error = False
    #### this is needed to cope with the case where d4,d3,d2,d1 are single integers and not arrays
    if d4 is not None and not isinstance(d4, np.ndarray): d4=[d4]
    if d3 is not None and not isinstance(d3, np.ndarray): d3=[d3]
    if d2 is not None and not isinstance(d2, np.ndarray): d2=[d2]
    if d1 is not None and not isinstance(d1, np.ndarray): d1=[d1]
    ### now the main part
    if dimension == 2:
        #### this is needed for 1d-type files (where dim=2 but axes are time-vert and not lat-lon)
        if unidim==1: d2=d4 ; d1=d3 ; d4 = None ; d3 = None
        if mesharea is None: mesharea=np.ones(shape)
        if max(d2) >= shape[0]: error = True
        elif max(d1) >= shape[1]: error = True
        elif d1 is not None and d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d1 is not None: output = mean(input[:,d1],axis=1)
        elif d2 is not None:
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
    elif dimension == 3:
        if mesharea is None: mesharea=np.ones(shape[[1,2]])
        if max(d4) >= shape[0]: error = True
        elif max(d2) >= shape[1]: error = True
        elif max(d1) >= shape[2]: error = True
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0);totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:],axis=0)
            try:
                totalarea = np.ma.masked_where(getmask(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d1 is not None: output = mean(input[d4,:,:],axis=0); output=mean(output[:,d1],axis=1)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1);totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d1 is not None: output = mean(input[:,:,d1],axis=2)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None: output = mean(input[d4,:,:],axis=0)
    elif dimension == 4:
        if mesharea is None: mesharea=np.ones(shape[[2,3]]) # mesharea=np.random.random_sample(shape[[2,3]])*5. + 2. # for testing
        if max(d4) >= shape[0]: error = True
        elif max(d3) >= shape[1]: error = True
        elif max(d2) >= shape[2]: error = True
        elif max(d1) >= shape[3]: error = True
        elif d4 is not None and d3 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0); totalarea = mean(totalarea[d1])
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0); output = mean(output[d1])/totalarea
        elif d4 is not None and d3 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            try:
                totalarea = np.ma.masked_where(np.isnan(output),mesharea)
                totalarea = mean(totalarea[d2,:],axis=0)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[d2,:],axis=0)/totalarea
        elif d4 is not None and d3 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
            output = mean(output[:,d1],axis=1)
        elif d4 is not None and d2 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
            #noperturb = smooth1d(output,window_len=7)
            #lenlen = len(output) ; output = output[1:lenlen-7] ; yeye = noperturb[4:lenlen-4]
            #plot(output) ; plot(yeye) ; show() ; plot(output-yeye) ; show()
        elif d3 is not None and d2 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            if anomaly:
                for k in range(output.shape[0]): output[k,:,:] = 100. * ((output[k,:,:] / smooth(output[k,:,:],csmooth)) - 1.)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1); totalarea = mean(totalarea[:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1); output = mean(output[:,d1],axis=1)/totalarea
        elif d4 is not None and d3 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = reduce_zaxis(output[d3,:,:],ax=0,yint=yint,vert=alt,indice=d3)
            if anomaly: output = 100. * ((output / smooth(output,csmooth)) - 1.)
        elif d4 is not None and d2 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d4 is not None and d1 is not None:
            output = mean(input[d4,:,:,:],axis=0)
            output = mean(output[:,:,d1],axis=2)
        elif d3 is not None and d2 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            try:
                totalarea = np.tile(mesharea,(output.shape[0],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,d2,:],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,d2,:],axis=1)/totalarea
        elif d3 is not None and d1 is not None:
            output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
            output = mean(output[:,:,d1],axis=2)
        elif d2 is not None and d1 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,1))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2); totalarea = mean(totalarea[:,:,d1],axis=1)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2); output = mean(output[:,:,d1],axis=1)/totalarea
        elif d1 is not None: output = mean(input[:,:,:,d1],axis=3)
        elif d2 is not None:
            try:
                totalarea = np.tile(mesharea,(output.shape[0],output.shape[1],1,output.shape[3]))
                totalarea = np.ma.masked_where(getmask(output),totalarea)
                totalarea = mean(totalarea[:,:,d2,:],axis=2)
            except: print "(problem with areas. I skip this)" ; mesharea = 1. ; totalarea = 1.
            output = output*mesharea; output = mean(output[:,:,d2,:],axis=2)/totalarea
        elif d3 is not None: output = reduce_zaxis(input[:,d3,:,:],ax=1,yint=yint,vert=alt,indice=d3)
        elif d4 is not None: output = mean(input[d4,:,:,:],axis=0)
    dimension2 = np.array(output).ndim
    shape2 = np.array(output).shape
    print 'REDUCEFIELD dim,shape: ',dimension,shape,' >>> ',dimension2,shape2
    return output, error

## Author: AC + AS
def reduce_zaxis (input,ax=None,yint=False,vert=None,indice=None):
    from mymath import max,mean
    from scipy import integrate
    import numpy as np
    if yint and vert is not None and indice is not None:
        if type(input).__name__=='MaskedArray':
            input.set_fill_value([np.NaN])
            output = integrate.trapz(input.filled(),x=vert[indice],axis=ax)
        else:
            output = integrate.trapz(input,x=vert[indice],axis=ax)
    else:
        output = mean(input,axis=ax)
    return output

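## --- illustrative sketch (added for documentation; the column below is made up) ---
## With yint=True, reduce_zaxis() integrates along the vertical with the trapezoidal rule
## instead of averaging; the two prints show the difference between the two reductions.
def _example_reduce_zaxis():
    import numpy as np
    from scipy import integrate
    vert = np.array([0., 1000., 2000.])  # made-up altitudes (m)
    prof = np.array([1.0, 0.5, 0.25])    # some quantity on those levels
    print np.mean(prof)                  # plain vertical average: ~0.58
    print integrate.trapz(prof, x=vert)  # column integral: 1125.0
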
## Author: AS + TN
def definesubplot ( numplot, fig ):
    from matplotlib.pyplot import rcParams
    rcParams['font.size'] = 12. ## default (important for multiple calls)
    if numplot <= 0:
        subv = 99999
        subh = 99999
    elif numplot == 1:
        subv = 1
        subh = 1
    elif numplot == 2:
        subv = 1 #2
        subh = 2 #1
        fig.subplots_adjust(wspace = 0.35)
        rcParams['font.size'] = int( rcParams['font.size'] * 3. / 4. )
    elif numplot == 3:
        subv = 3
        subh = 1
        fig.subplots_adjust(hspace = 0.5)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot == 4:
        subv = 2
        subh = 2
        fig.subplots_adjust(wspace = 0.4, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 2. / 3. )
    elif numplot <= 6:
        subv = 2
        subh = 3
        #fig.subplots_adjust(wspace = 0.4, hspace = 0.0)
        fig.subplots_adjust(wspace = 0.5, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 8:
        subv = 2
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 9:
        subv = 3
        subh = 3
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 12:
        subv = 3
        subh = 4
        fig.subplots_adjust(wspace = 0, hspace = 0.1)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    elif numplot <= 16:
        subv = 4
        subh = 4
        fig.subplots_adjust(wspace = 0.3, hspace = 0.3)
        rcParams['font.size'] = int( rcParams['font.size'] * 1. / 2. )
    else:
        print "number of plots supported: 1 to 16"
        exit()
    return subv,subh

## Author: AS
def getstralt(nc,nvert):
    varinfile = nc.variables.keys()
    if 'vert' not in varinfile:
        stralt = "_lvl" + str(nvert)
    else:
        zelevel = int(nc.variables['vert'][nvert])
        if abs(zelevel) < 10000.: strheight=str(zelevel)+"m"
        else: strheight=str(int(zelevel/1000.))+"km"
        if 'altitude' in nc.dimensions: stralt = "_"+strheight+"-AMR"
        elif 'altitude_abg' in nc.dimensions: stralt = "_"+strheight+"-ALS"
        elif 'bottom_top' in nc.dimensions: stralt = "_"+strheight
        elif 'pressure' in nc.dimensions: stralt = "_"+str(zelevel)+"Pa"
        else: stralt = ""
    return stralt

## Author: AS
def getlschar ( namefile, getaxis=False ):
    from netCDF4 import Dataset
    from timestuff import sol2ls
    from numpy import array
    from string import rstrip
    import os as daos
    namefiletest = rstrip( rstrip( rstrip( namefile, chars="_z"), chars="_zabg"), chars="_p")
    testexist = daos.path.isfile(namefiletest)
    zetime = None
    if testexist:
        namefile = namefiletest
        #### we assume that wrfout is next to wrfout_z and wrfout_zabg
        nc = Dataset(namefile)
        zetime = None
        days_in_month = [61, 66, 66, 65, 60, 54, 50, 46, 47, 47, 51, 56]
        plus_in_month = [ 0, 61,127,193,258,318,372,422,468,515,562,613]
        if 'Times' in nc.variables:
            zetime = nc.variables['Times'][0]
            shape = array(nc.variables['Times']).shape
            if shape[0] < 2: zetime = None
    if zetime is not None \
       and 'vert' not in nc.variables:
        ##### strangely enough this does not work for api or ncrcat results!
        zesol = plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
        dals = int( 10. * sol2ls ( zesol ) ) / 10.
        ###
        zetime2 = nc.variables['Times'][1]
        one = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
        next = int(zetime2[11]+zetime2[12]) + int(zetime2[14]+zetime2[15])/37.
        zehour = one
        zehourin = abs ( next - one )
        if not getaxis:
            lschar = "_Ls"+str(dals)
        else:
            zelen = len(nc.variables['Times'][:])
            yeye = range(zelen) ; lsaxis = range(zelen) ; solaxis = range(zelen) ; ltaxis = range(zelen)
            for iii in yeye:
                zetime = nc.variables['Times'][iii]
                ltaxis[iii] = int(zetime[11]+zetime[12]) + int(zetime[14]+zetime[15])/37.
                solaxis[iii] = ltaxis[iii] / 24. + plus_in_month[ int(zetime[5]+zetime[6])-1 ] + int(zetime[8]+zetime[9]) - 1 ## GCM sols start at 0
                lsaxis[iii] = sol2ls ( solaxis[iii] )
                if ltaxis[iii] < ltaxis[iii-1]: ltaxis[iii] = ltaxis[iii] + 24.
                #print ltaxis[iii], solaxis[iii], lsaxis[iii], getattr( nc, 'JULDAY' )
            lschar = lsaxis ; zehour = solaxis ; zehourin = ltaxis
    else:
        lschar=""
        zehour = 0
        zehourin = 1
    return lschar, zehour, zehourin

## Author: AS
def getprefix (nc):
    prefix = 'LMD_MMM_'
    prefix = prefix + 'd'+str(getattr(nc,'GRID_ID'))+'_'
    prefix = prefix + str(int(getattr(nc,'DX')/1000.))+'km_'
    return prefix

## Author: AS
def getproj (nc):
    typefile = whatkindfile(nc)
    if typefile in ['meso','geo']:
        ### (should CEN_LON be passed to the projection?)
        map_proj = getattr(nc, 'MAP_PROJ')
        cen_lat = getattr(nc, 'CEN_LAT')
        if map_proj == 2:
            if cen_lat > 10.:
                proj="npstere"
                #print "NP stereographic polar domain"
            else:
                proj="spstere"
                #print "SP stereographic polar domain"
        elif map_proj == 1:
            #print "lambert projection domain"
            proj="lcc"
        elif map_proj == 3:
            #print "mercator projection"
            proj="merc"
        else:
            proj="merc"
    elif typefile in ['gcm']: proj="cyl" ## problems with the other projections (traces behind the sphere?)
    else: proj="ortho"
    return proj

## Author: AS
def ptitle (name):
    from matplotlib.pyplot import title
    title(name)
    print name

## Author: AS
def polarinterv (lon2d,lat2d):
    import numpy as np
    wlon = [np.min(lon2d),np.max(lon2d)]
    ind = np.array(lat2d).shape[0] / 2 ## to get a good boundlat and to get the pole
    wlat = [np.min(lat2d[ind,:]),np.max(lat2d[ind,:])]
    return [wlon,wlat]

## Author: AS
def simplinterv (lon2d,lat2d):
    import numpy as np
    return [[np.min(lon2d),np.max(lon2d)],[np.min(lat2d),np.max(lat2d)]]

## Author: AS
def wrfinterv (lon2d,lat2d):
    nx = len(lon2d[0,:])-1
    ny = len(lon2d[:,0])-1
    lon1 = lon2d[0,0]
    lon2 = lon2d[nx,ny]
    lat1 = lat2d[0,0]
    lat2 = lat2d[nx,ny]
    if abs(0.5*(lat1+lat2)) > 60.: wider = 0.5 * (abs(lon1)+abs(lon2)) * 0.1
    else: wider = 0.
    if lon1 < lon2: wlon = [lon1, lon2 + wider]
    else: wlon = [lon2, lon1 + wider]
    if lat1 < lat2: wlat = [lat1, lat2]
    else: wlat = [lat2, lat1]
    return [wlon,wlat]

## Author: AS
def makeplotres (filename,res=None,pad_inches_value=0.25,folder='',disp=True,ext='png',erase=False):
    import matplotlib.pyplot as plt
    from os import system
    addstr = ""
    if res is not None:
        res = int(res)
        addstr = "_"+str(res)
    name = filename+addstr+"."+ext
    if folder != '': name = folder+'/'+name
    plt.savefig(name,dpi=res,bbox_inches='tight',pad_inches=pad_inches_value)
    if disp: display(name)
    if ext in ['eps','ps','svg']: system("tar czvf "+name+".tar.gz "+name+" ; rm -f "+name)
    if erase: system("mv "+name+" to_be_erased")
    return

## Author: AS + AC
def dumpbdy (field,n,stag=None,condition=False,onlyx=False,onlyy=False):
    nx = len(field[0,:])-1
    ny = len(field[:,0])-1
    if condition:
        if stag == 'U': nx = nx-1
        if stag == 'V': ny = ny-1
        if stag == 'W': nx = nx+1 #special LES case when we dump stag on W
    if onlyx: result = field[:,n:nx-n]
    elif onlyy: result = field[n:ny-n,:]
    else: result = field[n:ny-n,n:nx-n]
    return result

## Author: AS + AC
def getcoorddef ( nc ):
    import numpy as np
    ## getcoord2d for predefined types
    typefile = whatkindfile(nc)
    if typefile in ['meso']:
        if '9999' not in getattr(nc,'START_DATE') :
            ## regular mesoscale
            [lon2d,lat2d] = getcoord2d(nc)
        else:
            ## idealized mesoscale
            nx=getattr(nc,'WEST-EAST_GRID_DIMENSION')
            ny=getattr(nc,'SOUTH-NORTH_GRID_DIMENSION')
            dlat=getattr(nc,'DX')
            ## this is dirty because it is Martian-specific
            # ... but this is just intended to get "lat-lon" like info
            falselon = np.arange(-dlat*(nx-1)/2.,dlat*(nx-1)/2.,dlat)/60000.
            falselat = np.arange(-dlat*(ny-1)/2.,dlat*(ny-1)/2.,dlat)/60000.
            [lon2d,lat2d] = np.meshgrid(falselon,falselat) ## dummy coordinates
            print "WARNING: domain plot artificially centered on lat,lon 0,0"
    elif typefile in ['gcm','earthgcm','ecmwf']:
        #### shouldn't this be nc.variables?
        if "longitude" in nc.dimensions: dalon = "longitude"
        elif "lon" in nc.dimensions: dalon = "lon"
        else: dalon = "nothing"
        if "latitude" in nc.dimensions: dalat = "latitude"
        elif "lat" in nc.dimensions: dalat = "lat"
        else: dalat = "nothing"
        [lon2d,lat2d] = getcoord2d(nc,nlat=dalat,nlon=dalon,is1d=True)
    elif typefile in ['geo']:
        [lon2d,lat2d] = getcoord2d(nc,nlat='XLAT_M',nlon='XLONG_M')
    return lon2d,lat2d

## Author: AS
def getcoord2d (nc,nlat='XLAT',nlon='XLONG',is1d=False):
    import numpy as np
    if nlon == "nothing" or nlat == "nothing":
        print "NO LAT LON FIELDS. I AM TRYING MY BEST. I ASSUME GLOBAL FIELD."
        lon = np.linspace(-180.,180.,getdimfromvar(nc)[-1])
        lat = np.linspace(-90.,90.,getdimfromvar(nc)[-2])
        [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:
        if is1d:
            lat = nc.variables[nlat][:]
            lon = nc.variables[nlon][:]
            [lon2d,lat2d] = np.meshgrid(lon,lat)
        else:
            lat = nc.variables[nlat][0,:,:]
            lon = nc.variables[nlon][0,:,:]
            [lon2d,lat2d] = [lon,lat]
    return lon2d,lat2d

## Author: AS
def getdimfromvar (nc):
    varinfile = nc.variables.keys()
    dim = nc.variables[varinfile[-1]].shape ## usually the last variable is 4D or 3D
    return dim

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def smooth1d(x,window_len=11,window='hanning'):
    """smooth the data using a window with requested size.
    This method is based on the convolution of a scaled window with the signal.
    The signal is prepared by introducing reflected copies of the signal
    (with the window size) in both ends so that transient parts are minimized
    in the beginning and end part of the output signal.
    input:
        x: the input signal
        window_len: the dimension of the smoothing window; should be an odd integer
        window: the type of window from 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'
            flat window will produce a moving average smoothing.
    output:
        the smoothed signal
    example:
        t=linspace(-2,2,50)
        x=sin(t)+randn(len(t))*0.1
        y=smooth1d(x)
    see also:
        numpy.hanning, numpy.hamming, numpy.bartlett, numpy.blackman, numpy.convolve
        scipy.signal.lfilter
    TODO: the window parameter could be the window itself if an array instead of a string
    """
    import numpy
    x = numpy.array(x)
    if x.ndim != 1:
        raise ValueError, "smooth only accepts 1 dimension arrays."
    if x.size < window_len:
        raise ValueError, "Input vector needs to be bigger than window size."
    if window_len<3:
        return x
    if not window in ['flat', 'hanning', 'hamming', 'bartlett', 'blackman']:
        raise ValueError, "Window is one of 'flat', 'hanning', 'hamming', 'bartlett', 'blackman'"
    s=numpy.r_[x[window_len-1:0:-1],x,x[-1:-window_len:-1]]
    #print(len(s))
    if window == 'flat': #moving average
        w=numpy.ones(window_len,'d')
    else:
        w=eval('numpy.'+window+'(window_len)')
    y=numpy.convolve(w/w.sum(),s,mode='valid')
    return y

## Author: AS
def smooth (field, coeff):
    ## actually blur_image could work with different coeff on x and y
    if coeff > 1: result = blur_image(field,int(coeff))
    else: result = field
    return result

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def gauss_kern(size, sizey=None):
    import numpy as np
    # Returns a normalized 2D gauss kernel array for convolutions
    size = int(size)
    if not sizey:
        sizey = size
    else:
        sizey = int(sizey)
    x, y = np.mgrid[-size:size+1, -sizey:sizey+1]
    g = np.exp(-(x**2/float(size)+y**2/float(sizey)))
    return g / g.sum()

## FROM COOKBOOK http://www.scipy.org/Cookbook/SignalSmooth
def blur_image(im, n, ny=None) :
    from scipy.signal import convolve
    # blurs the image by convolving with a gaussian kernel of typical size n.
    # The optional keyword argument ny allows for a different size in the y direction.
    g = gauss_kern(n, sizey=ny)
    improc = convolve(im, g, mode='same')
    return improc

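## --- illustrative usage sketch (added for documentation; the field below is made up) ---
## A minimal example of the 2D gaussian smoothing used by smooth()/blur_image().
def _example_blur():
    import numpy as np
    field = np.zeros((21, 21))
    field[10, 10] = 1.                 # one spike in the middle
    smoothed = blur_image(field, 3)    # gaussian kernel of typical size 3
    print field.max(), smoothed.max()  # the spike is spread out and lowered
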
## Author: AS
def getwinddef (nc):
    ###
    varinfile = nc.variables.keys()
    if 'Um' in varinfile: [uchar,vchar] = ['Um','Vm'] #; print "this is API meso file"
    elif 'U' in varinfile: [uchar,vchar] = ['U','V'] #; print "this is RAW meso file"
    elif 'u' in varinfile: [uchar,vchar] = ['u','v'] #; print "this is GCM file"
    elif 'vitu' in varinfile: [uchar,vchar] = ['vitu','vitv'] #; print "this is GCM v5 file"
    ### you can add choices here !
    else: [uchar,vchar] = ['not found','not found']
    ###
    if uchar in ['U']: metwind = False ## geometrical (wrt grid)
    else: metwind = True ## meteorological (zon/mer)
    if metwind is False: print "Not using meteorological winds. You trust numerical grid as being (x,y)"
    ###
    return uchar,vchar,metwind

## Author: AS
def vectorfield (u, v, x, y, stride=3, scale=15., factor=250., color='black', csmooth=1, key=True):
    ## scale sets the reference vector
    ## factor sets all the lengths (including the reference). INCREASE it to shorten the vectors.
    import matplotlib.pyplot as plt
    import numpy as np
    #posx = np.min(x) - np.std(x) / 10.
    #posy = np.min(y) - np.std(y) / 10.
    posx = np.min(x)
    posy = np.min(y) - 4.*np.std(y) / 10.
    u = smooth(u,csmooth)
    v = smooth(v,csmooth)
    widthvec = 0.003 #0.005 #0.003
    q = plt.quiver( x[::stride,::stride],\
                    y[::stride,::stride],\
                    u[::stride,::stride],\
                    v[::stride,::stride],\
                    angles='xy',color=color,pivot='middle',\
                    scale=factor,width=widthvec )
    if color in ['white','yellow']: kcolor='black'
    else: kcolor=color
    if key: p = plt.quiverkey(q,posx,posy,scale,\
                    str(int(scale)),coordinates='data',color=kcolor,labelpos='S',labelsep = 0.03)
    return

## Author: AS
def display (name):
    from os import system
    system("display "+name+" > /dev/null 2> /dev/null &")
    return name

## Author: AS
def findstep (wlon):
    steplon = int((wlon[1]-wlon[0])/4.) #3
    step = 120.
    while step > steplon and step > 15. : step = step / 2.
    if step <= 15.:
        while step > steplon and step > 5. : step = step - 5.
    if step <= 5.:
        while step > steplon and step > 1. : step = step - 1.
    if step <= 1.:
        step = 1.
    return step

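## --- illustrative usage sketch (added for documentation; the windows below are made up) ---
## findstep() picks a "nice" graticule spacing from the longitude window.
def _example_findstep():
    print findstep([-180., 180.])  # 60.0 for a global window
    print findstep([0., 90.])      # 15.0 for a quarter of the planet
    print findstep([0., 10.])      # 2.0 for a small regional window
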
## Author: AS
def define_proj (char,wlon,wlat,back=None,blat=None,blon=None):
    from mpl_toolkits.basemap import Basemap
    import numpy as np
    import matplotlib as mpl
    from mymath import max
    meanlon = 0.5*(wlon[0]+wlon[1])
    meanlat = 0.5*(wlat[0]+wlat[1])
    zewidth = np.abs(wlon[0]-wlon[1])*60000.*np.cos(3.14*meanlat/180.)
    zeheight = np.abs(wlat[0]-wlat[1])*60000.
    if blat is None:
        ortholat=meanlat
        if wlat[0] >= 80.: blat = -40.
        elif wlat[1] <= -80.: blat = -40.
        elif wlat[1] >= 0.: blat = wlat[0]
        elif wlat[0] <= 0.: blat = wlat[1]
    else: ortholat=blat
    if blon is None: ortholon=meanlon
    else: ortholon=blon
    h = 50. ## in km
    radius = 3397200.
    #print meanlat, meanlon
    if char == "cyl": m = Basemap(rsphere=radius,projection='cyl',\
            llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "moll": m = Basemap(rsphere=radius,projection='moll',lon_0=meanlon)
    elif char == "ortho": m = Basemap(rsphere=radius,projection='ortho',lon_0=ortholon,lat_0=ortholat)
    elif char == "lcc": m = Basemap(rsphere=radius,projection='lcc',lat_1=meanlat,lat_0=meanlat,lon_0=meanlon,\
            width=zewidth,height=zeheight)
            #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "npstere": m = Basemap(rsphere=radius,projection='npstere', boundinglat=blat, lon_0=0.)
    elif char == "spstere": m = Basemap(rsphere=radius,projection='spstere', boundinglat=blat, lon_0=180.)
    elif char == "nplaea": m = Basemap(rsphere=radius,projection='nplaea', boundinglat=wlat[0], lon_0=meanlon)
    elif char == "laea": m = Basemap(rsphere=radius,projection='laea',lon_0=meanlon,lat_0=meanlat,lat_ts=meanlat,\
            width=zewidth,height=zeheight)
            #llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    elif char == "nsper": m = Basemap(rsphere=radius,projection='nsper',lon_0=meanlon,lat_0=meanlat,satellite_height=h*1000.)
    elif char == "merc": m = Basemap(rsphere=radius,projection='merc',lat_ts=0.,\
            llcrnrlat=wlat[0],urcrnrlat=wlat[1],llcrnrlon=wlon[0],urcrnrlon=wlon[1])
    fontsizemer = int(mpl.rcParams['font.size']*3./4.)
    if char in ["cyl","lcc","merc","nsper","laea"]: step = findstep(wlon)
    else: step = 10.
    steplon = step*2.
    zecolor ='grey'
    zelinewidth = 1
    zelatmax = 80
    if meanlat > 75.: zelatmax = 90. ; step = step/2.
    # to show gcm grid:
    #zecolor = 'r'
    #zelinewidth = 1
    #step = 180./48.
    #steplon = 360./64.
    #zelatmax = 90. - step/3
    if char not in ["moll"]:
        m.drawmeridians(np.r_[-180.:180.:steplon], labels=[0,0,0,1], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
        m.drawparallels(np.r_[-90.:90.:step], labels=[1,0,0,0], color=zecolor, linewidth=zelinewidth, fontsize=fontsizemer, latmax=zelatmax)
    if back: m.warpimage(marsmap(back),scale=0.75)
    #if not back:
    #    if not var: back = "mola" ## if no var: draw mola
    #    elif typefile in ['mesoapi','meso','geo'] \
    #       and proj not in ['merc','lcc','nsper','laea']: back = "molabw" ## if var but meso: draw molabw
    #    else: pass ## else: draw None
    return m

## Author: AS
#### temporary test
def putpoints (map,plot):
    #### from http://www.scipy.org/Cookbook/Matplotlib/Maps
    # lat/lon coordinates of a few points of interest.
    lats = [18.4]
    lons = [-134.0]
    points=['Olympus Mons']
    # compute the native map projection coordinates for these points.
    x,y = map(lons,lats)
    # plot filled circles at the locations of the points.
    map.plot(x,y,'bo')
    # plot the names of those points.
    wherept = 0 #1000 #50000
    for name,xpt,ypt in zip(points,x,y):
        plot.text(xpt+wherept,ypt+wherept,name)
    ## the name does not show up...
    return

## Author: AS
def calculate_bounds(field,vmin=None,vmax=None):
    import numpy as np
    from mymath import max,min,mean
    ind = np.where(field < 9e+35)
    fieldcalc = field[ ind ] # the compact syntax does not work if field is a tuple
    ###
    dev = np.std(fieldcalc)*3.0
    ###
    if vmin is None: zevmin = mean(fieldcalc) - dev
    else: zevmin = vmin
    ###
    if vmax is None: zevmax = mean(fieldcalc) + dev
    else: zevmax = vmax
    if vmin == vmax:
        zevmin = mean(fieldcalc) - dev ### for continuity
        zevmax = mean(fieldcalc) + dev ### for continuity
    ###
    if zevmin < 0. and min(fieldcalc) > 0.: zevmin = 0.
    print "BOUNDS field ", min(fieldcalc), max(fieldcalc), " //// adopted", zevmin, zevmax
    return zevmin, zevmax

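## --- illustrative usage sketch (added for documentation; the field below is made up) ---
## calculate_bounds() centres the colour range on mean +/- 3 standard deviations unless
## explicit vmin/vmax are given.
def _example_calculate_bounds():
    import numpy as np
    field = np.array([10., 11., 12., 13., 14.])
    print calculate_bounds(field)                     # about (7.8, 16.2)
    print calculate_bounds(field, vmin=0., vmax=20.)  # user-imposed bounds (0.0, 20.0)
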
## Author: AS
def bounds(what_I_plot,zevmin,zevmax):
    from mymath import max,min,mean
    ### might be convenient to add the missing value in arguments
    #what_I_plot[ what_I_plot < zevmin ] = zevmin#*(1. + 1.e-7)
    if zevmin < 0: what_I_plot[ what_I_plot < zevmin*(1. - 1.e-7) ] = zevmin*(1. - 1.e-7)
    else: what_I_plot[ what_I_plot < zevmin*(1. + 1.e-7) ] = zevmin*(1. + 1.e-7)
    #print "NEW MIN ", min(what_I_plot)
    what_I_plot[ what_I_plot > 9e+35 ] = -9e+35
    what_I_plot[ what_I_plot > zevmax ] = zevmax*(1. - 1.e-7)
    #print "NEW MAX ", max(what_I_plot)
    return what_I_plot

## Author: AS
def nolow(what_I_plot):
    from mymath import max,min
    lim = 0.15*0.5*(abs(max(what_I_plot))+abs(min(what_I_plot)))
    print "NO PLOT BELOW VALUE ", lim
    what_I_plot [ abs(what_I_plot) < lim ] = 1.e40
    return what_I_plot


## Author: AC
def hole_bounds(what_I_plot,zevmin,zevmax):
    import numpy as np
    zi=0
    for i in what_I_plot:
        zj=0
        for j in i:
            if ((j < zevmin) or (j > zevmax)):what_I_plot[zi,zj]=np.NaN
            zj=zj+1
        zi=zi+1

    return what_I_plot

## Author: AS
def zoomset (wlon,wlat,zoom):
    dlon = abs(wlon[1]-wlon[0])/2.
    dlat = abs(wlat[1]-wlat[0])/2.
    [wlon,wlat] = [ [wlon[0]+zoom*dlon/100.,wlon[1]-zoom*dlon/100.],\
                    [wlat[0]+zoom*dlat/100.,wlat[1]-zoom*dlat/100.] ]
    print "ZOOM %",zoom,wlon,wlat
    return wlon,wlat

## Author: AS
def fmtvar (whichvar="def"):
    fmtvar = { \
        "MIXED": "%.0f",\
        "UPDRAFT": "%.0f",\
        "DOWNDRAFT": "%.0f",\
        "TK": "%.0f",\
        "T": "%.0f",\
        #"ZMAX_TH": "%.0f",\
        #"WSTAR": "%.0f",\
        # Variables from TES ncdf format
        "T_NADIR_DAY": "%.0f",\
        "T_NADIR_NIT": "%.0f",\
        # Variables from tes.py ncdf format
        "TEMP_DAY": "%.0f",\
        "TEMP_NIGHT": "%.0f",\
        # Variables from MCS and mcs.py ncdf format
        "DTEMP": "%.0f",\
        "NTEMP": "%.0f",\
        "DNUMBINTEMP": "%.0f",\
        "NNUMBINTEMP": "%.0f",\
        # other stuff
        "TPOT": "%.0f",\
        "TSURF": "%.0f",\
        "U_OUT1": "%.0f",\
        "T_OUT1": "%.0f",\
        "def": "%.1e",\
        "PTOT": "%.0f",\
        "HGT": "%.1e",\
        "USTM": "%.2f",\
        "HFX": "%.0f",\
        "ICETOT": "%.1e",\
        "TAU_ICE": "%.2f",\
        "TAUICE": "%.2f",\
        "VMR_ICE": "%.1e",\
        "MTOT": "%.1f",\
        "ANOMALY": "%.1f",\
        "W": "%.1f",\
        "WMAX_TH": "%.1f",\
        "WSTAR": "%.1f",\
        "QSURFICE": "%.0f",\
        "UM": "%.0f",\
        "WIND": "%.0f",\
        "UVMET": "%.0f",\
        "UV": "%.0f",\
        "ALBBARE": "%.2f",\
        "TAU": "%.1f",\
        "CO2": "%.2f",\
        "ENFACT": "%.1f",\
        #### T.N.
        "TEMP": "%.0f",\
        "VMR_H2OICE": "%.0f",\
        "VMR_H2OVAP": "%.0f",\
        "TAUTES": "%.2f",\
        "TAUTESAP": "%.2f",\
             }
    if "TSURF" in whichvar: whichvar = "TSURF"
    if whichvar not in fmtvar:
        whichvar = "def"
    return fmtvar[whichvar]

## Author: AS
####################################################################################################################
### Colorbars http://www.scipy.org/Cookbook/Matplotlib/Show_colormaps?action=AttachFile&do=get&target=colormaps3.png
def defcolorb (whichone="def"):
    whichcolorb = { \
        "def": "spectral",\
        "HGT": "spectral",\
        "HGT_M": "spectral",\
        "TK": "gist_heat",\
        "TPOT": "Paired",\
        "TSURF": "RdBu_r",\
        "QH2O": "PuBu",\
        "USTM": "YlOrRd",\
        "WIND": "YlOrRd",\
        #"T_nadir_nit": "RdBu_r",\
        #"T_nadir_day": "RdBu_r",\
        "HFX": "RdYlBu",\
        "ICETOT": "YlGnBu_r",\
        #"MTOT": "PuBu",\
        "CCNQ": "YlOrBr",\
        "CCNN": "YlOrBr",\
        "TEMP": "jet",\
        "TAU_ICE": "Blues",\
        "TAUICE": "Blues",\
        "VMR_ICE": "Blues",\
        "W": "jet",\
        "WMAX_TH": "spectral",\
        "ANOMALY": "RdBu_r",\
        "QSURFICE": "hot_r",\
        "ALBBARE": "spectral",\
        "TAU": "YlOrBr_r",\
        "CO2": "YlOrBr_r",\
        "MIXED": "GnBu",\
        #### T.N.
        "MTOT": "spectral",\
        "H2O_ICE_S": "RdBu",\
        "VMR_H2OICE": "PuBu",\
        "VMR_H2OVAP": "PuBu",\
        "WATERCAPTAG": "Blues",\
             }
    #W --> spectral or jet
    #spectral BrBG RdBu_r
    #print "predefined colorbars"
    if "TSURF" in whichone: whichone = "TSURF"
    if whichone not in whichcolorb:
        whichone = "def"
    return whichcolorb[whichone]

1069 | ## Author: AS |
---|
1070 | def definecolorvec (whichone="def"): |
---|
1071 | whichcolor = { \ |
---|
1072 | "def": "black",\ |
---|
1073 | "vis": "yellow",\ |
---|
1074 | "vishires": "yellow",\ |
---|
1075 | "molabw": "yellow",\ |
---|
1076 | "mola": "black",\ |
---|
1077 | "gist_heat": "white",\ |
---|
1078 | "hot": "tk",\ |
---|
1079 | "gist_rainbow": "black",\ |
---|
1080 | "spectral": "black",\ |
---|
1081 | "gray": "red",\ |
---|
1082 | "PuBu": "black",\ |
---|
1083 | "titan": "red",\ |
---|
1084 | } |
---|
1085 | if whichone not in whichcolor: |
---|
1086 | whichone = "def" |
---|
1087 | return whichcolor[whichone] |
---|
1088 | |
---|
1089 | ## Author: AS |
---|
1090 | def marsmap (whichone="vishires"): |
---|
1091 | from os import uname |
---|
1092 | mymachine = uname()[1] |
---|
1093 | ### not sure about speed-up with this method... looks the same |
---|
1094 | if "lmd.jussieu.fr" in mymachine: domain = "/u/aslmd/WWW/maps/" |
---|
1095 | elif "aymeric-laptop" in mymachine: domain = "/home/aymeric/Dropbox/Public/" |
---|
1096 | else: domain = "http://www.lmd.jussieu.fr/~aslmd/maps/" |
---|
1097 | whichlink = { \ |
---|
1098 | #"vis": "http://maps.jpl.nasa.gov/pix/mar0kuu2.jpg",\ |
---|
1099 | #"vishires": "http://www.lmd.jussieu.fr/~aslmd/maps/MarsMap_2500x1250.jpg",\ |
---|
1100 | #"geolocal": "http://dl.dropbox.com/u/11078310/geolocal.jpg",\ |
---|
1101 | #"mola": "http://www.lns.cornell.edu/~seb/celestia/mars-mola-2k.jpg",\ |
---|
1102 | #"molabw": "http://dl.dropbox.com/u/11078310/MarsElevation_2500x1250.jpg",\ |
---|
1103 | "thermalday": domain+"thermalday.jpg",\ |
---|
1104 | "thermalnight": domain+"thermalnight.jpg",\ |
---|
1105 | "tesalbedo": domain+"tesalbedo.jpg",\ |
---|
1106 | "vis": domain+"mar0kuu2.jpg",\ |
---|
1107 | "vishires": domain+"MarsMap_2500x1250.jpg",\ |
---|
1108 | "geolocal": domain+"geolocal.jpg",\ |
---|
1109 | "mola": domain+"mars-mola-2k.jpg",\ |
---|
1110 | "molabw": domain+"MarsElevation_2500x1250.jpg",\ |
---|
1111 | "clouds": "http://www.johnstonsarchive.net/spaceart/marswcloudmap.jpg",\ |
---|
1112 | "jupiter": "http://www.mmedia.is/~bjj/data/jupiter_css/jupiter_css.jpg",\ |
---|
1113 | "jupiter_voy": "http://www.mmedia.is/~bjj/data/jupiter/jupiter_vgr2.jpg",\ |
---|
1114 | #"bw": domain+"EarthElevation_2500x1250.jpg",\ |
---|
1115 | "bw": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg",\ |
---|
1116 | "contrast": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg",\ |
---|
1117 | "nice": "http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg",\ |
---|
1118 | "blue": "http://eoimages.gsfc.nasa.gov/ve/2430/land_ocean_ice_2048.jpg",\ |
---|
1119 | "blueclouds": "http://eoimages.gsfc.nasa.gov/ve/2431/land_ocean_ice_cloud_2048.jpg",\ |
---|
1120 | "justclouds": "http://eoimages.gsfc.nasa.gov/ve/2432/cloud_combined_2048.jpg",\ |
---|
1121 | "pluto": "http://www.boulder.swri.edu/~buie/pluto/pluto_all.png",\ |
---|
1122 | "triton": "http://laps.noaa.gov/albers/sos/neptune/triton/triton_rgb_cyl_www.jpg",\ |
---|
1123 | "titan": "http://laps.noaa.gov/albers/sos/saturn/titan/titan_rgb_cyl_www.jpg",\ |
---|
1124 | #"titan": "http://laps.noaa.gov/albers/sos/celestia/titan_50.jpg",\ |
---|
1125 | "titanuni": "http://maps.jpl.nasa.gov/pix/sat6fss1.jpg",\ |
---|
1126 | "venus": "http://laps.noaa.gov/albers/sos/venus/venus4/venus4_rgb_cyl_www.jpg",\ |
---|
1127 | "cosmic": "http://laps.noaa.gov/albers/sos/universe/wmap/wmap_rgb_cyl_www.jpg",\ |
---|
1128 | } |
---|
1129 | ### see http://www.mmedia.is/~bjj/planetary_maps.html |
---|
1130 | if whichone not in whichlink: |
---|
1131 | print "marsmap: choice not defined... you'll get the default one... " |
---|
1132 | whichone = "vishires" |
---|
1133 | return whichlink[whichone] |
---|
1134 | |
---|
1135 | #def earthmap (whichone): |
---|
1136 | # if whichone == "contrast": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthMapAtmos_2500x1250.jpg" |
---|
1137 | # elif whichone == "bw": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/EarthElevation_2500x1250.jpg" |
---|
1138 | # elif whichone == "nice": whichlink="http://users.info.unicaen.fr/~karczma/TEACH/InfoGeo/Images/Planets/earthmap1k.jpg" |
---|
1139 | # return whichlink |
---|
1140 | |
---|
## Author: AS
def latinterv (area="Whole"):
    ## each entry is [[latmin,latmax],[lonmin,lonmax]]
    zelist = { \
        "Europe":                 [[ 20., 80.],[- 50., 50.]],\
        "Central_America":        [[-10., 40.],[ 230., 300.]],\
        "Africa":                 [[-20., 50.],[- 50., 50.]],\
        "Whole":                  [[-90., 90.],[-180., 180.]],\
        "Southern_Hemisphere":    [[-90., 60.],[-180., 180.]],\
        "Northern_Hemisphere":    [[-60., 90.],[-180., 180.]],\
        "Tharsis":                [[-30., 60.],[-170.,- 10.]],\
        "Whole_No_High":          [[-60., 60.],[-180., 180.]],\
        "Chryse":                 [[-60., 60.],[- 60., 60.]],\
        "North_Pole":             [[ 50., 90.],[-180., 180.]],\
        "Close_North_Pole":       [[ 75., 90.],[-180., 180.]],\
        "Far_South_Pole":         [[-90.,-40.],[-180., 180.]],\
        "South_Pole":             [[-90.,-50.],[-180., 180.]],\
        "Close_South_Pole":       [[-90.,-75.],[-180., 180.]],\
        "Sirenum_Crater_large":   [[-46.,-34.],[-166.,-151.]],\
        "Sirenum_Crater_small":   [[-36.,-26.],[-168.,-156.]],\
        "Rupes":                  [[ 72., 90.],[-120.,- 20.]],\
        "Xanadu":                 [[-40., 20.],[  40., 120.]],\
    }
    if area not in zelist: area = "Whole"
    [olat,olon] = zelist[area]
    return olon,olat

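## Usage sketch (added for illustration; not part of the original code).
## latinterv returns the [lonmin,lonmax],[latmin,latmax] window of a named area,
## silently falling back to "Whole" for unknown names:
#    wlon, wlat = latinterv("Tharsis")   # wlon = [-170.,-10.] ; wlat = [-30.,60.]
#    wlon, wlat = latinterv("nowhere")   # same result as latinterv("Whole")
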
---|
## Author: TN
def separatenames (name):
    from numpy import concatenate
    # look for commas in the input name to separate different names (files, variables, etc.)
    if name is None:
        names = None
    else:
        names = []
        stop = 0
        currentname = name
        while stop == 0:
            indexvir = currentname.find(',')
            if indexvir == -1:
                stop = 1
                name1 = currentname
            else:
                name1 = currentname[0:indexvir]
            names = concatenate((names,[name1]))
            currentname = currentname[indexvir+1:len(currentname)]
    return names

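## Usage sketch (added for illustration; not part of the original code).
## separatenames splits a comma-separated string into a numpy array of strings:
#    separatenames("tsurf,ps,u")   # -> ['tsurf', 'ps', 'u'] (as a numpy array)
#    separatenames(None)           # -> None
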
---|
## Author: TN
def readslices(saxis):
    from numpy import empty
    if saxis is None:
        zesaxis = None
    else:
        zesaxis = empty((len(saxis),2))
        for i in range(len(saxis)):
            a = separatenames(saxis[i])
            if len(a) == 1:
                zesaxis[i,:] = float(a[0])
            else:
                zesaxis[i,0] = float(a[0])
                zesaxis[i,1] = float(a[1])
    return zesaxis

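## Usage sketch (added for illustration; not part of the original code).
## readslices turns a list of "min,max" (or single-value) strings, e.g. from the
## command line, into an N x 2 array of float bounds:
#    readslices(["10,20","45"])   # -> [[10.,20.],[45.,45.]]
#    readslices(None)             # -> None
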
---|
## Author: TN
def readdata(data,datatype,coord1,coord2):
    ## Read sparse data
    from numpy import empty
    if datatype == 'txt':
        if len(data[coord1].shape) == 1:
            return data[coord1][:]
        elif len(data[coord1].shape) == 2:
            return data[coord1][:,int(coord2)-1]
        else:
            errormess('error in readdata')
    elif datatype == 'sav':
        return data[coord1][coord2]
    else:
        errormess(datatype+' type is not supported!')

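## Usage sketch (added for illustration; not part of the original code);
## 'mydata' and its keys below are hypothetical placeholders:
#    vals = readdata(mydata, 'txt', 'temp', '3')     # 3rd column of mydata['temp'] if it is 2D
#    vals = readdata(mydata, 'sav', 'temp', 'day1')  # mydata['temp']['day1'] for .sav-style data
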
---|
## Author: AS
def bidimfind(lon2d,lat2d,vlon,vlat,file=None):
    import numpy as np
    import matplotlib.pyplot as mpl
    if   vlat is None: array = (lon2d - vlon)**2
    elif vlon is None: array = (lat2d - vlat)**2
    else:              array = (lon2d - vlon)**2 + (lat2d - vlat)**2
    idy,idx = np.unravel_index( np.argmin(array), lon2d.shape )
    if vlon is not None:
        if (np.abs(lon2d[idy,idx]-vlon)) > 5: errormess("longitude not found ",printvar=lon2d)
    if vlat is not None:
        if (np.abs(lat2d[idy,idx]-vlat)) > 5: errormess("latitude not found ",printvar=lat2d)
    if file is not None:
        print idx,idy,lon2d[idy,idx],vlon
        print idx,idy,lat2d[idy,idx],vlat
        var = file.variables["HGT"][:,:,:]
        mpl.contourf(var[0,:,:],30,cmap = mpl.get_cmap(name="Greys_r")) ; mpl.axis('off') ; mpl.plot(idx,idy,'mx',mew=4.0,ms=20.0)
        mpl.show()
    return idy,idx

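## Usage sketch (added for illustration; not part of the original code).
## bidimfind returns the (j,i) grid indices closest to a target point on 2D
## coordinate arrays (137.4E, 4.6S below is an arbitrary example location):
#    idy, idx = bidimfind(lon2d, lat2d, 137.4, -4.6)
#    value_at_point = field[idy, idx]
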
---|
## Author: TN
def getsindex(saxis,index,axis):
    # input  : all the desired slices and the index of the one to treat
    # output : all the axis indexes to be taken into account when reducing the field
    import numpy as np
    if ( np.array(axis).ndim == 2):
        axis = axis[:,0]
    if saxis is None:
        zeindex = None
    else:
        aaa = int(np.argmin(abs(saxis[index,0] - axis)))
        bbb = int(np.argmin(abs(saxis[index,1] - axis)))
        [imin,imax] = np.sort(np.array([aaa,bbb]))
        zeindex = np.array(range(imax-imin+1))+imin
        # because -180 and 180 are the same point in longitude,
        # we get rid of one of them for averaging purposes.
        if axis[imin] == -180 and axis[imax] == 180:
            zeindex = zeindex[0:len(zeindex)-1]
            print "INFO: whole-longitude averaging requested, so the last point is not taken into account."
    return zeindex

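## Usage sketch (added for illustration; not part of the original code).
## getsindex converts one [min,max] slice (as produced by readslices) into the
## array of indices of the 1D axis that fall inside it:
#    slon = readslices(["-10,10"])
#    ilon = getsindex(slon, 0, lon)   # indices of the lon axis between -10 and 10
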
---|
## Author: TN
def define_axis(lon,lat,vert,time,indexlon,indexlat,indexvert,indextime,what_I_plot,dim0,vertmode):
    # The purpose of define_axis is to find the x and y axis scales in a smart way
    # x axis priority: 1/ time 2/ lon 3/ lat 4/ vertical
    # To be improved !!!...
    from numpy import array,swapaxes
    x = None
    y = None
    count = 0
    what_I_plot = array(what_I_plot)
    shape = what_I_plot.shape
    if indextime is None and len(time) > 1:
        print "AXIS is time"
        x = time
        count = count+1
    if indexlon is None and len(lon) > 1:
        print "AXIS is lon"
        if count == 0: x = lon
        else:          y = lon
        count = count+1
    if indexlat is None and len(lat) > 1:
        print "AXIS is lat"
        if count == 0: x = lat
        else:          y = lat
        count = count+1
    if indexvert is None and ((dim0 == 4) or (y is None)):
        print "AXIS is vert"
        if vertmode == 0:  # vertical axis is as is (GCM grid)
            if count == 0: x = range(len(vert))
            else:          y = range(len(vert))
            count = count+1
        else:  # vertical axis is in km
            if count == 0: x = vert
            else:          y = vert
            count = count+1
    x = array(x)
    y = array(y)
    print "CHECK SHAPE: what_I_plot, x, y", what_I_plot.shape, x.shape, y.shape
    if len(shape) == 1:
        if shape[0] != len(x): print "WARNING: shape[0] != len(x). Correcting." ; what_I_plot = what_I_plot[0:len(x)]
        if len(y.shape) > 0: y = ()
    elif len(shape) == 2:
        if shape[1] == len(y) and shape[0] == len(x) and shape[0] != shape[1]:
            print "INFO: swapaxes: ",what_I_plot.shape,shape ; what_I_plot = swapaxes(what_I_plot,0,1)
        else:
            if   shape[0] != len(y): print "WARNING: shape[0] != len(y). Correcting." ; what_I_plot = what_I_plot[0:len(y),:]
            elif shape[1] != len(x): print "WARNING: shape[1] != len(x). Correcting." ; what_I_plot = what_I_plot[:,0:len(x)]
    elif len(shape) == 3:
        if vertmode < 0: print "not supported. must check array dimensions at some point. not difficult to implement though."
    return what_I_plot,x,y

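## Usage sketch (added for illustration; not part of the original code).
## define_axis keeps the non-reduced dimensions as plot axes, with priority
## time > lon > lat > vert; e.g. with time and vert already reduced, lon becomes
## x and lat becomes y (the field may be truncated or swapped to match):
#    field2d, x, y = define_axis(lon,lat,vert,time,None,None,indexvert,indextime,\
#                                field2d, 4, 0)
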
---|
# Author: TN + AS
def determineplot(slon, slat, svert, stime):
    nlon = 1 # number of longitudinal slices -- 1 means None
    nlat = 1
    nvert = 1
    ntime = 1
    nslices = 1
    if slon is not None:
        nslices = nslices*len(slon)
        nlon = len(slon)
    if slat is not None:
        nslices = nslices*len(slat)
        nlat = len(slat)
    if svert is not None:
        nslices = nslices*len(svert)
        nvert = len(svert)
    if stime is not None:
        nslices = nslices*len(stime)
        ntime = len(stime)
    #else:
    #    nslices = 2
    mapmode = 0
    if slon is None and slat is None:
        mapmode = 1 # in this case we plot a map, with the given projection

    return nlon, nlat, nvert, ntime, mapmode, nslices

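## Usage sketch (added for illustration; not part of the original code).
## determineplot counts the requested slices along each axis and switches to
## map mode when neither lon nor lat is sliced; with svert and stime given:
#    nlon, nlat, nvert, ntime, mapmode, nslices = determineplot(None, None, svert, stime)
#    # -> mapmode = 1 and nslices = len(svert)*len(stime)
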
---|
## Author: AC
## Reduce complexity of the main script by moving the contour part here.
## Also allows calling it from elsewhere, which can be useful.
#
#def call_contour(what_I_plot,error,x,y,m,lon,lat,vert,time,vertmode,ze_var2,indextime,indexlon,indexlat,indexvert,yintegral,mapmode,typefile,var2,ticks):
#    from matplotlib.pyplot import contour, plot, clabel
#    import numpy as np
#    #what_I_plot = what_I_plot*mult
#    if not error:
#        if mapmode == 1:
#            if typefile in ['mesoapi','meso']: what_I_plot = dumpbdy(what_I_plot,6)
#        zevmin, zevmax = calculate_bounds(what_I_plot)
#        zelevels = np.linspace(zevmin,zevmax,ticks) #20)
#        if var2 == 'HGT': zelevels = np.arange(-10000.,30000.,2000.)
#        if mapmode == 0:
#            #if typefile in ['mesoideal']: what_I_plot = dumpbdy(what_I_plot,0,stag='W')
#            itime=indextime
#            if len(what_I_plot.shape) == 3: itime=[0]
#            what_I_plot, x, y = define_axis(lon,lat,vert,time,indexlon,indexlat,indexvert,\
#                                            itime,what_I_plot, len(ze_var2.shape),vertmode)
#        ### If we plot a 2-D field
#        if len(what_I_plot.shape) == 2:
#            #zelevels=[1.]
#            if   mapmode == 0: cs = contour(x,y,what_I_plot, zelevels, colors='k', linewidths = 1 )   #0.33 colors='w' )# , alpha=0.5)
#            elif mapmode == 1: cs = m.contour(x,y,what_I_plot, zelevels, colors='k', linewidths = 1 ) #0.33 colors='w' )# , alpha=0.5)
#            #clabel(cs,zelevels,inline=3,fmt='%1.1f',fontsize=7)
#        ### If we plot a 1-D field
#        elif len(what_I_plot.shape) == 1:
#            plot(what_I_plot,x)
#    else:
#        errormess("There is an error in reducing field !")
#    return error

---|
## Author: AS
def maplatlon( lon,lat,field,\
               proj="cyl",colorb="jet",ndiv=10,zeback="molabw",trans=0.6,title="",\
               vecx=None,vecy=None,stride=2 ):
    ### an easy way to map a field over a lat/lon grid
    import numpy as np
    import matplotlib.pyplot as mpl
    from matplotlib.cm import get_cmap
    ## get lon and lat as 2D arrays. get lat/lon intervals
    numdim = len(np.array(lon).shape)
    if   numdim == 2: [lon2d,lat2d] = [lon,lat]
    elif numdim == 1: [lon2d,lat2d] = np.meshgrid(lon,lat)
    else:             errormess("lon and lat arrays must be 1D or 2D")
    [wlon,wlat] = latinterv()
    ## define projection and background. define x and y given the projection
    m = define_proj(proj,wlon,wlat,back=zeback,blat=None,blon=None)
    x, y = m(lon2d, lat2d)
    ## define field. bound field.
    what_I_plot = np.transpose(field)
    zevmin, zevmax = calculate_bounds(what_I_plot)  ## vmin=min(what_I_plot_frame), vmax=max(what_I_plot_frame)
    what_I_plot = bounds(what_I_plot,zevmin,zevmax)
    ## define contour field levels. define color palette
    ticks = ndiv + 1
    zelevels = np.linspace(zevmin,zevmax,ticks)
    palette = get_cmap(name=colorb)
    ## contour the field
    m.contourf( x, y, what_I_plot, zelevels, cmap = palette, alpha = trans )
    ## draw the colorbar
    if proj in ['moll','cyl']: zeorientation="horizontal" ; zepad = 0.07
    else:                      zeorientation="vertical"   ; zepad = 0.03
    #daformat = fmtvar(fvar.upper())
    daformat = "%.0f"
    zecb = mpl.colorbar( fraction=0.05,pad=zepad,format=daformat,orientation=zeorientation,\
                         ticks=np.linspace(zevmin,zevmax,num=min([ticks/2+1,21])),extend='neither',spacing='proportional' )
    ## give a title
    if zeorientation == "horizontal": zecb.ax.set_xlabel(title)
    else: ptitle(title)
    ## draw vectors
    if vecx is not None and vecy is not None:
        [vecx_frame,vecy_frame] = m.rotate_vector( np.transpose(vecx), np.transpose(vecy), lon2d, lat2d ) ## for metwinds
        vectorfield(vecx_frame, vecy_frame, x, y, stride=stride, csmooth=2,\
                    scale=30., factor=500., color=definecolorvec(colorb), key=True)
        ## scale sets the reference vector; factor scales all lengths (including the reference): INCREASE it to shorten the vectors.
    return
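
## Usage sketch (added for illustration; not part of the original code).
## maplatlon wraps the whole map-drawing sequence; lon and lat may be 1D or 2D,
## and field is expected with lon as its first dimension (it is transposed
## internally). 'tsurf' below is a hypothetical field:
#    import matplotlib.pyplot as mpl
#    maplatlon(lon, lat, tsurf, proj="moll", colorb="jet", ndiv=20, title="Surface temperature (K)")
#    mpl.show()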