source: trunk/UTIL/PYTHON/mcs.py @ 469

Last change on this file since 469 was 465, checked in by acolaitis, 14 years ago

Fixed the hrecast for mcs.py. Fixed mcs.py with the -H mode which uses hrecast. Added a README for MCS.PY

  • Property svn:executable set to *
File size: 13.6 KB
Line 
1#!/usr/bin/env python
2
3### A. Colaitis
4
5##
6#   This routine transforms a diagfi.nc file into a diagfi_MCS.nc file where
7#   the fields are directly comparable to those contained in MCS data, i.e.
8#   fields are re-binned at times over the ranges specified in the MCS file.
9###
10
11###########################################################################################
12###########################################################################################
### What is below relates to running the file as a command-line executable (very convenient)
if __name__ == "__main__":
   import sys
   from optparse import OptionParser    ### to be replaced by argparse
   from netCDF4 import Dataset
   from os import system,path
   from times import sol2ls
   import numpy as np
   from mymath import find_nearest
   from myplot import getfield,separatenames
   from make_netcdf import make_gcm_netcdf
   from gcm_transformations import call_zrecast,call_hrecast
   parser = OptionParser() 

   #############################
   ### Options
   # BUGFIX: repaired garbled help strings ("Will pass this operation is
   # recasted file", "usefull", "coma-separated"); option names, dests,
   # actions and defaults are unchanged.
   parser.add_option('-f', '--file',   action='store',dest='file',     type="string",  default=None,  help='[NEEDED] filename.')
   parser.add_option('-m', '--mfile',  action='store',dest='mcsfile',     type="string",  default=None,  help='[NEEDED] filename for MCS comparison.')
   parser.add_option('-v', '--var',    action='append',dest='var',      type="string",  default=None,  help='[NEEDED] Variables to process. (comma-separated list. aps and bps are always included.)')
   parser.add_option('-x', action='store_false',dest='recast',     default=True,  help='Force aps and bps to be included in output file (useful if your file is already recasted along z) [True]')
   parser.add_option('-i', '--zrecast', action='store_true', dest='zrecast', default=False, help='Cast zrecast.e on diagfi file with MCS pressure levels. Will skip this operation if a recasted file is already present, unless --override is specified. [False]')
   parser.add_option('-H', '--hrecast', action='store_true', dest='hrecast', default=False, help='Cast hrecast.e on diagfi file on MCS lat/lon grid. Will skip this operation if a recasted file is already present, unless --override is specified. [False]')
   parser.add_option('--override', action='store_true', dest='override', default=False, help='Force zrecast.e to act even if recasted file is already present(will erase previous recasted file) [False]')
   parser.add_option('--ditch', action='store_true', dest='ditch', default=False, help='Ditch recasted file when interpolation is performed. [False]')
   parser.add_option('--latreverse', action='store_true', dest='latreverse', default=False, help='Reverse the latitude axis in output diagfi. [False]')

   #############################
   ### Get options and variables
   (opt,args) = parser.parse_args()
42
   #############################
   ### Load and check data

   # At least one field name must be supplied with -v; bail out early otherwise.
   if opt.var is None:
      print "You must specify at least a field to process with -v."
      exit()

   # Hrecast & Zrecast
   # varznames: list of field names parsed from the comma-separated -v option
   # (only the first -v occurrence is used).
   varznames=separatenames(opt.var[0])
   filename=opt.file

   # Optional horizontal recast onto the MCS lat/lon grid via hrecast.e,
   # producing <input>_h.nc.  An existing recasted file is reused unless
   # --override is given.  NOTE(review): filename[0:len(filename)-3] assumes
   # the input name ends in ".nc" -- confirm with callers.
   if opt.hrecast:
      if (path.exists(filename[0:len(filename)-3]+"_h.nc") and (not opt.override)):
         print "--> "+filename[0:len(filename)-3]+"_h.nc"
         print "Recasted file is already there, skipping interpolation. [use --override to force interpolation]"
         filename=filename[0:len(filename)-3]+"_h.nc"
      else:
         print "--> "+filename[0:len(filename)-3]+"_h.nc"
         filename=call_hrecast (  input_name      = [filename], \
                    fields  = varznames, \
                    predefined = 'mcs')[0]

   # Optional vertical recast onto MCS pressure levels via zrecast.e,
   # producing <input>_P.nc (possibly chained after the hrecast above, since
   # filename was rebound).  Same reuse/--override logic as for hrecast.
   if opt.zrecast:
      if (path.exists(filename[0:len(filename)-3]+"_P.nc") and (not opt.override)):
         print "--> "+filename[0:len(filename)-3]+"_P.nc"
         print "Recasted file is already there, skipping interpolation. [use --override to force interpolation]"
         filename=filename[0:len(filename)-3]+"_P.nc"
      else:
         print "--> "+filename[0:len(filename)-3]+"_P.nc"
         filename=call_zrecast (  interp_mode   = 2, \
                    input_name      = [filename], \
                    fields  = varznames, \
                    predefined = 'mcs')[0]
   # Files

   print "--> Loading diagfi dataset."

   # nc: the (possibly recasted) GCM diagfi file; ncmcs: the MCS data file.
   nc=Dataset(filename)
   ncmcs=Dataset(opt.mcsfile)

   # Dimensions

   lon=nc.variables["longitude"][:]
   lat=nc.variables["latitude"][:]
   alt=nc.variables["altitude"][:]
   time=nc.variables["Time"][:] # in fraction of sols
   # Initial sol number: taken from controle[3], wrapped to one Mars year
   # (669 sols).  Recasted files may lack "controle", so fall back to the
   # original input file, and ultimately to asking the user.
   if "controle" in nc.variables:
      controle=nc.variables["controle"][:]
      day_ini=controle[3]%669
   else:
      if opt.zrecast:
         nccontrol=Dataset(opt.file)
         if "controle" in nccontrol.variables:
            controle=nccontrol.variables["controle"][:]
            day_ini=controle[3]%669
         else:
            print "Error: could not find controle variable in diagfi."
            # NOTE(review): Python-2 input() evals what is typed -- a number
            # is expected here; consider raw_input()+int().
            day_ini=input("Please type initial sol number:")%669
      else:
         print "Error: could not find controle variable in diagfi."
         day_ini=input("Please type initial sol number:")%669
   # Shift times to absolute sols, then convert to solar longitude (Ls).
   time[:]=time[:]+day_ini
   nx=len(lon)
   ny=len(lat)
   nz=len(alt)
   nt=len(time)
   lstime=sol2ls(time)
111
   # MCS

   print "--> Loading and preparing MCS dataset."

   # d*/n* hold the dayside/nightside observation-time windows (min/max) on
   # the MCS (time, lat, lon) grid.
   dtimemintmp=ncmcs.variables["dtimemin"][:,:,:]
   dtimemaxtmp=ncmcs.variables["dtimemax"][:,:,:]
   ntimemintmp=ncmcs.variables["ntimemin"][:,:,:]
   ntimemaxtmp=ncmcs.variables["ntimemax"][:,:,:]
   lonmcs=ncmcs.variables["longitude"][:]
   latmcs=ncmcs.variables["latitude"][:]
   timemcs=ncmcs.variables["time"][:]%360 # IN LS

   # Negative time-window values flag missing MCS data: mask them and make
   # NaN the fill value so later .filled() calls propagate NaN.
   dtimemin=np.ma.masked_where(dtimemintmp < 0.,dtimemintmp)
   dtimemin.set_fill_value([np.NaN])
   dtimemax=np.ma.masked_where(dtimemaxtmp < 0.,dtimemaxtmp)
   dtimemax.set_fill_value([np.NaN])
   ntimemin=np.ma.masked_where(ntimemintmp < 0.,ntimemintmp)
   ntimemin.set_fill_value([np.NaN])
   ntimemax=np.ma.masked_where(ntimemaxtmp < 0.,ntimemaxtmp)
   ntimemax.set_fill_value([np.NaN])
132
   # Variables to treat

   print "--> Preparing diagfi dataset."

   # varz: list of raw arrays, one per requested field, with missing data
   # converted to NaN.
   varz=[]
   n=0
   for zn in varznames:
       load=getfield(nc,zn)
       # NOTE(review): this masks every value below -1e-20, i.e. all negative
       # values -- presumably a missing-value flag; confirm it is safe for
       # fields that can legitimately be negative.
       load=np.ma.masked_where(load < -1.e-20,load)
       load.set_fill_value([np.NaN])
       load=load.filled()
       # Re-mask anything invalid and fill again so the array is plain
       # ndarray with NaN marking missing points.
       load=np.ma.masked_invalid(load)
       load.set_fill_value([np.NaN])
       load=load.filled()
       varz.append(load)
       load=0.
       print "Found: "+zn+" with dimensions: "
       print np.array(varz[n]).shape
       n=n+1

   # dimensions[vv] is 3 for (t,y,x) fields, 4 for (t,z,y,x) fields; anything
   # else is unsupported and aborts the run.
   nzvar=len(varz)
   dimensions={}
   vv=0
   for var in varz:
       a=len(np.array(var).shape)
       if a == 3: dimensions[vv]=a
       elif a == 4: dimensions[vv]=a
       else:
          print "Warning, only 3d and 4d variables are supported for time-recasting"
          exit()
       vv=vv+1
164
   # Variables to save but not treated (only along z, or phisinit-like files)

   # aps/bps are the hybrid vertical coordinate coefficients, copied through
   # untouched.  Output names are "d<var>" (dayside) and "n<var>" (nightside).
   aps=nc.variables["aps"][:]
   bps=nc.variables["bps"][:]
   fullnames=["aps","bps"]
   for name in varznames:
       fullnames.append("d"+name)
       fullnames.append("n"+name)
   print "Will output: "
   # opt.recast defaults to True, so aps/bps are dropped from the output
   # unless -x was given (which sets recast to False).
   if opt.recast: print fullnames[2:]
   else: print fullnames
176   #############################
177   ### Building
178   #############################
179
180   ### We loop over chunks of gcm data corresponding to MCS time dimension
181   ### Bin sizes for mcs data is 5 degrees ls centered on value
182   varday=np.zeros([len(timemcs),nz,ny,nx])
183   varnight=np.zeros([len(timemcs),nz,ny,nx])
184   vardayout=np.zeros([nzvar,len(timemcs),nz,ny,nx])
185   varnightout=np.zeros([nzvar,len(timemcs),nz,ny,nx])
186   vardayout=np.ma.masked_invalid(vardayout)
187   varnightout=np.ma.masked_invalid(varnightout)
188   i=0
189   for ls in timemcs:
190       lsstart=ls-2.5
191       lsstop=ls+2.5
192       istart=find_nearest(lstime,lsstart,strict=True)
193       istop=find_nearest(lstime,lsstop,strict=True)
194       varchk=0
195       if ((istart is np.NaN) or (istop is np.NaN)):
196          vardayout[:,i,:,:,:]=np.NaN
197          varnightout[:,i,:,:,:]=np.NaN
198          print "Time interval skipped. Ls MCS: (",lsstart,';',lsstop,')',"// Ls diagfi: (",lstime.min(),';',lstime.max(),')'
199          i=i+1
200          continue
201       print "--->> Processing Data. Ls MCS: (",lsstart,';',lsstop,')',"// Ls diagfi: (",lstime.min(),';',lstime.max(),')'
202       # warning, python's convention is that the second index of array[a:b] is the array index of element after the one being picked last.
203       # for that reason, array[0:0] is nan and array[0:1] is only one value. Hence, len(array[a:b+1]) is b-a+1 and not b-a+2
204       print "     .initialisation."
205       
206       
207       varchk=np.zeros([nzvar,istop-istart+1,nz,ny,nx])
208       vv=0
209       for variable in varz:
210           if dimensions[vv] is 3:
211              varchk[vv,:,0,:,:]=variable[istart:istop+1,:,:]
212           else:
213              varchk[vv,:,:,:,:]=variable[istart:istop+1,:,:,:]
214           vv=vv+1
215       varchk=np.ma.masked_invalid(varchk)
216       varchk.set_fill_value([np.NaN])
217       varchktime=time[istart:istop+1]
218       ndays=np.floor(varchktime[len(varchktime)-1])-np.floor(varchktime[0])
219       dtmichk=dtimemin[i,:,:]
220       dtmachk=dtimemax[i,:,:]
221       ntmichk=ntimemin[i,:,:]
222       ntmachk=ntimemax[i,:,:]
223       dtmichk.set_fill_value([np.NaN])
224       dtmachk.set_fill_value([np.NaN])
225       ntmichk.set_fill_value([np.NaN])
226       ntmachk.set_fill_value([np.NaN])
227       dtmichk=dtmichk.filled()
228       dtmachk=dtmachk.filled()
229       ntmichk=ntmichk.filled()
230       ntmachk=ntmachk.filled()
231
232   ### We iterate for each day in the chunk, on each grid point we find
233   ### the closest corresponding MCS grid point and the index of the
234   ### time in the chunk closest to the time in the closest MCS grid point.
235   ### (yea it's complicated)
236
237       vartmpnight=np.zeros([nzvar,ndays,nz,ny,nx])
238       vartmpday=np.zeros([nzvar,ndays,nz,ny,nx])
239       vartmpnight=np.ma.masked_invalid(vartmpnight)
240       vartmpday=np.ma.masked_invalid(vartmpday)
241       vartmpnight.set_fill_value([np.NaN])
242       vartmpday.set_fill_value([np.NaN])
243
244       nd=0
245       print "     .time indices MCS grid -> diagfi grid."
246       while nd < ndays:
247
248          daystart=find_nearest(varchktime-varchktime[0],nd)
249          daystop=find_nearest(varchktime-varchktime[0],nd+1)
250#          varchktime_lon=np.zeros([daystop-daystart+1,len(lon)])
251          ix=0
252          for x in lon:
253
254             varchktime_lon = 24.*(varchktime[daystart:daystop+1]-varchktime[daystart]) + x/15.
255
256             iy=0
257             for y in lat:
258                niy=find_nearest(latmcs,y)
259                nix=find_nearest(lonmcs,x)
260                nitdtmichk=find_nearest(varchktime_lon,dtmichk[niy,nix])
261                nitdtmachk=find_nearest(varchktime_lon,dtmachk[niy,nix])
262                nitntmichk=find_nearest(varchktime_lon,ntmichk[niy,nix])
263                nitntmachk=find_nearest(varchktime_lon,ntmachk[niy,nix])
264                for vv in np.arange(nzvar):
265                   if ((nitdtmichk is np.NaN) or (nitdtmachk is np.NaN)):
266                       vartmpday[vv,nd,:,iy,ix]=np.NaN
267                   elif nitdtmichk > nitdtmachk:
268                       vartmpday[vv,nd,:,iy,ix]=(np.ma.mean(varchk[vv,daystart+nitdtmichk:daystop+1,:,iy,ix],axis=0)+np.ma.mean(varchk[vv,daystart:daystart+nitdtmachk+1,:,iy,ix],axis=0))/2.
269                   else:
270                       vartmpday[vv,nd,:,iy,ix]=np.ma.mean(varchk[vv,daystart+nitdtmichk:daystart+nitdtmachk+1,:,iy,ix],axis=0)
271                   if ((nitntmichk is np.NaN) or (nitntmachk is np.NaN)):
272                       vartmpnight[vv,nd,:,iy,ix]=np.NaN
273                   elif nitntmichk > nitntmachk:
274                       vartmpnight[vv,nd,:,iy,ix]=(np.ma.mean(varchk[vv,daystart+nitntmichk:daystop+1,:,iy,ix],axis=0)+np.ma.mean(varchk[vv,daystart:daystart+nitntmachk+1,:,iy,ix],axis=0))/2.
275                   else:                                                           
276                       vartmpnight[vv,nd,:,iy,ix]=np.ma.mean(varchk[vv,daystart+nitntmichk:daystart+nitntmachk+1,:,iy,ix],axis=0)
277                iy=iy+1
278             ix=ix+1
279          nd=nd+1
280
281       print "     .creating bins."
282
283       vartmpdaymasked=np.ma.masked_invalid(vartmpday)
284       vartmpnightmasked=np.ma.masked_invalid(vartmpnight)
285       vartmpdaymasked.set_fill_value([np.NaN])
286       vartmpnightmasked.set_fill_value([np.NaN])
287       for vv in np.arange(nzvar):
288          vardayout[vv,i,:,:,:]=np.ma.mean(vartmpdaymasked[vv,:,:,:,:],axis=0)
289          varnightout[vv,i,:,:,:]=np.ma.mean(vartmpnightmasked[vv,:,:,:,:],axis=0)
290          print "          ."+varznames[vv]+".done"
291       i=i+1
292
293   print "--->> Preparing Data for ncdf. Missing value is NaN."
294
295   vardayout=np.ma.masked_invalid(vardayout)
296   varnightout=np.ma.masked_invalid(varnightout)
297   vardayout.set_fill_value([np.NaN])
298   varnightout.set_fill_value([np.NaN])
299
300   if opt.latreverse:
301       vardayout[:,:,:,:,:]=vardayout[:,:,:,::-1,:]
302       varnightout[:,:,:,:,:]=varnightout[:,:,:,::-1,:]
303
304   all=[aps,bps]
305   for vv in np.arange(nzvar):
306       if dimensions[vv] == 3:
307          all.append(vardayout[vv,:,0,:,:].filled())
308          all.append(varnightout[vv,:,0,:,:].filled())
309       elif dimensions[vv] == 4:
310          all.append(vardayout[vv,:,:,:,:].filled())
311          all.append(varnightout[vv,:,:,:,:].filled())
312
313   if opt.recast:
314      all=all[2:]
315      fullnames=fullnames[2:] 
316
317   if opt.latreverse:
318      lat=lat[::-1]
319
320   make_gcm_netcdf (zfilename=filename[0:len(filename)-3]+"_MCS.nc", \
321                        zdescription="Temperatures from diagfi reworked to match MCS format", \
322                        zlon=lon, \
323                        zlat=lat, \
324                        zalt=alt, \
325                        ztime=timemcs, \
326                        zvariables=all, \
327                        znames=fullnames)
328   if opt.zrecast and opt.ditch:
329      print "removing interpolated file"
330      system("rm -f "+opt.file[0:len(opt.file)-3]+"_P.nc")
Note: See TracBrowser for help on using the repository browser.