source: trunk/UTIL/PYTHON/Intercomparison/File_conversion/ic.py @ 803

Last change on this file since 803 was 764, checked in by acolaitis, 12 years ago

###################################################
# PYTHON / PLANETPLOT #
###################################################

# ------------------- XY plots ------------------ #

# Added a new category of plot, alongside unidim, contour, etc., called "xy"
# - xy plots are plots that do not use time, vert, lat or lon as axis
# - variables to be plotted are stored in plot_x and plot_y, which is done in
# select_getfield (there is no "what_I_plot" var for "xy" plots)
# - plot_x and plot_y are also subject to reduce_field. A None value indicates
# the plot is not 'xy'
# - "xy" plots are used for a specific subset of plots: histograms, fourier
# transforms, hodographs
# - added option --analysis to perform certain kinds of analysis on the data
# (corresponding to xy plots)
# - one selects the fields to plot (e.g. -v UV --lon 0 --lat 20) and
# chooses a kind of analysis:

--analysis fft # in the particular case given above, this mode corresponds

# to the mean of the amplitude spectrum of the vertical spatial
# fast fourier transform taken at each time index.
# note that for now, ffts are always done along the vertical
# axis. This could be made more flexible.
# note that the minimum wavelength you can plot depends on the
# vertical step of your simulation. THIS STEP HAS TO BE
# CONSTANT, hence you MUST use API or ZRECAST with constant
# spacing.
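
# A minimal numpy sketch of this fft mode (names like "field" and "dz" are
# illustrative assumptions, not the actual pp.py internals):

import numpy as np
field = np.random.randn(24, 1000)  # (time, altitude) slice at the requested lon/lat
dz = 10.                           # CONSTANT vertical spacing (from API or ZRECAST)
amp = np.abs(np.fft.rfft(field, axis=1))            # amplitude spectrum along the vertical
mean_amp = amp.mean(axis=0)                         # ... averaged over all time indices
wavenumber = np.fft.rfftfreq(field.shape[1], d=dz)  # smallest resolvable wavelength is 2*dz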

--analysis histo # histogram on the flattened data. If the user asks for --lon 0,20,

# the average is done before computing the histogram (contrary to the fft case).
# However, if given a 2D array (e.g. with only --lon and --lat remaining on a
# 4D field), the data is flattened before computation
# so that the result is still a 1D histogram.
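
# A minimal sketch of the flattening behaviour (illustrative array shapes):

import numpy as np
field = np.random.randn(49, 61)                          # e.g. a (lat, lon) slice of a 4D field
counts, edges = np.histogram(field.flatten(), bins=50)   # always a 1D histogram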

--analysis histodensity # histogram with a Gaussian kernel density estimate,

# also giving the mean, variance,
# skewness and kurtosis. Other distributions are available in
# the scipy.stats package and could be implemented.
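
# A minimal scipy sketch of this mode (illustrative sample; the real code also
# overlays the estimate on the histogram):

import numpy as np
from scipy import stats
sample = np.random.randn(10000)          # flattened data
kde = stats.gaussian_kde(sample)         # Gaussian kernel density estimate
x = np.linspace(sample.min(), sample.max(), 200)
density = kde(x)                         # smooth curve to plot over the histogram
mean, var = sample.mean(), sample.var()
skewness, kurtosis = stats.skew(sample), stats.kurtosis(sample)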

# - added variables "-v hodograph" and "-v hodograph_2". The first one is a
# regular hodograph with "u" and "v" as axes, with labels of the local times
# (use --axtime lt). This is a "xy" plot (you must specify a vertical level
# as well, usually --vert 0 with an interpolation at 5m (-i 4 -l 0.005)).
# The second one is the variation with local time (for example) of the wind
# rotation (arctan(v/u)). This is not a "xy" plot but "unidim".
# For --ope plots in "xy" cases, only the operation plot is displayed.
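
# A minimal matplotlib sketch of the two variants (u, v, lt are illustrative
# arrays; the real plots are built by pp.py):

import numpy as np
import matplotlib.pyplot as plt
lt = np.arange(0., 24., 1.)                     # local times of the outputs
u = 10.*np.cos(2.*np.pi*lt/24.)                 # winds at one vertical level
v = 10.*np.sin(2.*np.pi*lt/24.)
plt.plot(u, v)                                  # "-v hodograph": v against u ("xy" plot)
for t, x, y in zip(lt, u, v):
    plt.annotate('%02.0fh' % t, (x, y))         # local-time labels along the curve
plt.figure()
plt.plot(lt, np.arctan2(v, u))                  # "-v hodograph_2": wind rotation vs local time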

# ------------------- Operations ------------------ #

# - For operations --ope -, the histogram (fourth plot) has been removed. To get it
# back, call --ope -_histo.
# - _only has been added to the "+","-","-%" operations (e.g. "-_only")
# - For operations "-","+","-%","-_only","+_only","-%_only","-_histo", it is now
# possible to call multiple files (still one variable) and compare each of them
# with the unique given reference file. E.g. -f file1,file2 --fref file3 --ope - will give 6 plots:

file1 file3 file1-file3
file2 file3 file2-file3

# In the case of "xy" plots, the multiple operation plots are regrouped on a
# single plot (using multiple lines). The title of this plot is not 'fig(2)-fig(1)'
# (the default) but the argument of the --title command. Labels have to be given
# as follows: --labels "dummy","dummy","file1-file3 label","dummy","dummy","file2-file3 label"
# (dummy can be anything; this is to be improved). See the example command after this list.
# To be able to run on multiple files and easily introduce the correct number
# and order of plots, these operations have been moved outside of the main loop
# on namefiles and variables. Operations of the type "add_var" or "cat" have
# been left inside the loop and are unchanged.
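
# For instance, a complete call combining these options could look like this
# (hypothetical file names and labels, based on the options documented above):

pp.py -f file1,file2 --fref file3 -v UV --ope - --title "runs minus reference" --labels "dummy","dummy","file1-file3","dummy","dummy","file2-file3"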

# ------------------- Localtime ------------------ #

# Changed the way localtime is computed. Reasons:
# - it was assuming one timestep per hour in computations mixing indices and
# actual times
# - to determine the starting date, it was using the name of the file (for Meso)
# instead of the netcdf attribute (START_DATE)
# - it was using the computed mean longitude of the domain, which is not correct
# for hemispheric domains => better to use the netcdf attribute CEN_LON
# - it was using this mean longitude even for profile plots at a given
# longitude => better to get the local time at this given lon, especially
# important for large domains
# => the new localtime() is in myplot (the old one is commented out). Interv is
# obsolete (but not removed yet). The "Geo" case has not been looked at.
# The new localtime in myplot correctly accounts for the starting date of the
# file in all cases,
# accounts for the local longitude of the plot,
# and accounts for files whose outputs are per timestep rather than per hour.
# Specific cases can be added in localtime() in myplot. A sketch follows.
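
# A minimal sketch of the new logic (illustrative values; the real code is
# localtime() in myplot):

start_date = '2024-01-16_07:30:00'           # from the START_DATE netcdf attribute
utc_start = float(start_date[11:13]) + float(start_date[14:16])/60.
step = 0.5                                   # hours between two outputs (from the time axis)
lon = 137.4                                  # longitude of the plot, or CEN_LON
i = 10                                       # time index of the output
lt = (utc_start + i*step + lon/15.) % 24.    # 15 degrees of longitude = 1 hour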

# ------------------ Misc ------------------ #

# - added option --xlog to get x logarithmic axis (--ylog already existed)
# - added the possibility to use 2 files of different gridding (-f
# file1,file2) although of the same type (meso for example). For that purpose,
# lon, lat, alt and vert arrays are now indexed with 'index_f', as for
# all_var => all_lon, all_lat, all_alt, all_vert
# - added function teta_to_tk in myplot so that a call to pp.py on a standard
# LMD mesoscale file with "-v tk" can be done without the need to call API.
# The temperature is computed from T and PTOT, knowing P0, T0 and R_CP
# (see the sketch after this list).
# - bug correction in determineplot() that was causing wrong plot and slice
# counts when averaged lon, lat, vert or times were not requested (which is
# often the case!)
# - added new options to redope: --redope edge_x1, edge_x2, edge_y1, edge_y2
# which plots the boundaries of the domain. This is different compared to
# asking for a fixed longitude, because the domain boundary might not be at constant
# longitude (hemispheric domain for example). x1 is the western boundary, x2
# the eastern, y1 the southern and y2 the northern. x1 and x2 reduce the
# dimension along --lon, y1 and y2 reduce the dimension along --lat.
# - added control in windamplitude() to determine whether winds are staggered or
# unstaggered. This is useful when dealing with non-LMD_MMM files.
# - corrected a bug in reduce_field where the mean was computed on the wrong
# axis! (a pretty serious bug)
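
# A minimal sketch of the teta_to_tk conversion mentioned above (the constant
# values below are illustrative assumptions; the real ones are set in myplot):

import numpy as np
P0, T0, R_CP = 610., 220., 0.227   # reference pressure (Pa), temperature (K), R/cp
T = np.array([5., 10.])            # perturbation potential temperature (K)
PTOT = np.array([600., 550.])      # total pressure (Pa)
tk = (T + T0) * (PTOT / P0)**R_CP  # air temperature (K)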

# Examples of plots you can do with these new options can be found in
$YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/Plots_MasterScript/bam.sh

# ------------------ API ------------------ #

# - changed the maximum number of levels from 299 to 1000 in API (interpolation
# on 1000 levels is useful to get a larger bandwidth in the fourier transform)
# The following concerns users of MRAMS files:
# - API has not been modified for MRAMS files. Instead, a python script
# (ic.py) is run on MRAMS .ctl and .dat files, which automatically formats those
# files to be API and pp.py compatible.
# - ic.py is in $YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/File_conversion
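
# Typical usage of ic.py, based on the options defined in the script below
# (run from the directory containing the ./vis/ .ctl files):

ic.py --deepclean   # reset the converted_files/ and figures/ directories
ic.py -g            # convert with cdo import_binary and build mramsout_d* files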

###################################################
# INTERCOMPARISON TOOLS: #
###################################################

#ic.py in
$YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/File_conversion
#CDO installer with import_binary in
$YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/CDO
#Plotting scripts in
$YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/Plots_MasterScript
#1D sensibility tool in
$YOUR_SVN/trunk/UTIL/PYTHON/Intercomparison/1D_sensibility_tool

# See README in each of these folders for details.

  • Property svn:executable set to *
#!/u/acolmd/san0/Python/64bits/epd-7.1-2-rh5-x86_64/bin/python
if __name__ == "__main__":
    import subprocess
    import sys
    from os import system,unlink
    from glob import glob
    from optparse import OptionParser
    from myplot import errormess
    from ic import check_file, merge_atm_sfc
    #############################
    ### Get options and variables
    parser = OptionParser()
    parser.add_option('-g','--generate',  action='store_true',dest='generate', default=False,help='Generate netcdf files')
    parser.add_option('-c','--clean',     action='store_true',dest='clean',    default=False,help='Clean mrams_ files')
    parser.add_option('--deepclean',      action='store_true',dest='deepclean',default=False,help='Clean everything')
    (opt,args) = parser.parse_args()
    if opt.deepclean:
       for zfile in ['converted_files','figures']:
           subprocess.call(['touch',zfile],shell=False)
           subprocess.call(['rm','-rf',zfile],shell=False)
           subprocess.call(['mkdir',zfile],shell=False)
    if opt.clean:
       subprocess.call(['touch','converted_files/mramsout_d_dummy'],shell=False)
       for f in glob('converted_files/mramsout_d*'):
           unlink(f)
    if opt.generate:
       opath='./converted_files/'
       ipath='./vis/'
#       for grid in ['g1','g2','g3','g4']: ## INDEX 4 OF TEST CASE HOLDEN DOES NOT WORK (CDO ERROR)
       for grid in ['g1','g2','g3']:
           outfileatm='holden_ls150-atm-S-'+grid+'.nc'
           outfilesfc='holden_ls150-sfc-S-'+grid+'.nc'
           outfilecat='mramsout_d'
           infileatm='holden_ls150-atm-S-'+grid+'.ctl'
           infilesfc='holden_ls150-sfc-S-'+grid+'.ctl'
           print '- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -'
           err=check_file(opath+outfileatm)
           print '=> Beginning conversion for File '+infileatm+' -> in -> '+outfileatm
           if err == 1: subprocess.call(['cdo','-f','nc','import_binary',ipath+infileatm,opath+outfileatm],shell=False)
           print '=> File '+infileatm+' -> converted in -> '+outfileatm
           err=check_file(opath+outfilesfc)
           print '=> Beginning conversion for File '+infilesfc+' -> in -> '+outfilesfc
           if err == 1: subprocess.call(['cdo','-f','nc','import_binary',ipath+infilesfc,opath+outfilesfc],shell=False)
           print '=> File '+infilesfc+' -> converted in -> '+outfilesfc
           print '- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -'
           print '=> Beginning concatenation for Files '+outfilesfc+' and '+outfileatm
           merge_atm_sfc(opath+outfilesfc,opath+outfileatm,opath+outfilecat)
           print '=> Files concatenated!'
           print '- - - - - - - - - - - - - - - - - - - - - - - - - - - - - -'

#   subprocess.call(cmdstring,shell=False)

# Check if a file is present and readable
def check_file(filename):
    from os import path, access, R_OK  # W_OK for write permission.
    PATH=filename
    if path.exists(PATH) and path.isfile(PATH) and access(PATH, R_OK):
       return 0  # file exists and is readable
    else:
       return 1  # file is not present and/or not readable

# Find the lat/lon center of the grid
def find_center(lon,lat,xlon,xlat):
    import numpy as np
    i_lon=0 ; i_lat=0 ; i_lon2=0 ; i_lat2=0
    # odd number of points: a single central index (no +1 because arrays start from 0)
    if len(lon)%2!=0: i_lon=len(lon)//2
    if len(lat)%2!=0: i_lat=len(lat)//2
    # even number of points: average the two central indices
    if i_lon == 0:
         i_lon=len(lon)//2 -1
         i_lon2=len(lon)//2
    if i_lat == 0:
         i_lat=len(lat)//2 -1
         i_lat2=len(lat)//2
    if i_lon2 == 0 and i_lat2 == 0:
         cen_lon = xlon[i_lat,i_lon]
         cen_lat = xlat[i_lat,i_lon]
    if i_lon2 != 0 and i_lat2 != 0:
         cen_lon = (xlon[i_lat,i_lon] + xlon[i_lat2,i_lon2] + xlon[i_lat,i_lon2] + xlon[i_lat2,i_lon])/4.
         cen_lat = (xlat[i_lat,i_lon] + xlat[i_lat2,i_lon2] + xlat[i_lat,i_lon2] + xlat[i_lat2,i_lon])/4.
    if i_lon2 != 0 and i_lat2 == 0:
         cen_lon = (xlon[i_lat,i_lon] + xlon[i_lat,i_lon2])/2.
         cen_lat = (xlat[i_lat,i_lon] + xlat[i_lat,i_lon2])/2.
    if i_lon2 == 0 and i_lat2 != 0:
         cen_lon = (xlon[i_lat,i_lon] + xlon[i_lat2,i_lon])/2.
         cen_lat = (xlat[i_lat,i_lon] + xlat[i_lat2,i_lon])/2.
    return cen_lon,cen_lat

# Merge atm and sfc netcdf files. cdo cat does not work for that (neither does ncecat).
def merge_atm_sfc(fsfc,fatm,fmerged):
    from os import system
    from netCDF4 import Dataset
    import numpy as np
    import subprocess

    # Calendar
    path_calendar='/san0/acolmd/SVN/trunk/MESOSCALE/LMD_MM_MARS/SIMU/calendar'
    gcm_sol=[]
    gcm_ls=[]
    mmm_date=[]
    calendar = open(path_calendar, 'r')
    calendar.readline() # get rid of the first line
    calendar_l=calendar.readlines()
    for line in calendar_l:
        s=str.split(line)
        gcm_sol=np.append(gcm_sol,np.float(s[1]))
        gcm_ls=np.append(gcm_ls,np.float(s[2]))
        mmm_date=np.append(mmm_date,s[3])
    calendar.close()
    calendar_l=0.

    # Constants
    grav=3.72

    # Grid properties
    ls_simu_start=147.  # ls of start simu
#    lt_start="07:30:00" # local time of start simu at cen_lon # read in .ctl
#   NOTE: I ASSUME THE LT_START IN .CTL FOR MRAMS IS LOCAL TIME OF START AT LONGITUDE 0
    dx=[240000.,80000.,26667,8889]
    dy=dx

    # Date management
    idx=np.abs(gcm_ls - ls_simu_start).argmin()
    date_start=mmm_date[idx]
    # USER: fill the dates you want to analyse:
    days=[16,17,18]

    # Grid id
    if 'g1' in fatm:
        gid=1
    elif 'g2' in fatm:
        gid=2
    elif 'g3' in fatm:
        gid=3
    else:
        gid=4

    # Load datasets
    atm=Dataset(fatm) ; sfc=Dataset(fsfc)

    # Reconstruct dimensions
    lon = atm.variables['lon'] ; nlon = len(lon)
    lat = atm.variables['lat'] ; nlat = len(lat)
    lev = atm.variables['lev'] ; nlev = len(lev)
    time = atm.variables['time'] ; ntime = len(time)
    units_time=time.units[12:len(time.units)]

    # Date from the mrams file:
    year_mrams, month_mrams, day_mrams, hour_mrams, minute_mrams, second_mrams = date_to_time(units_time)
    lt_start = hour_mrams+':'+minute_mrams+':'+second_mrams
    # ... or date from the calendar with initial Ls:
    year, month, day, hour, minute, second = date_to_time(date_start,lt_start)
    dt=(time[1]-time[0])*60.
    daylength=3600.*24./(dt*60.)

    i=0
    imax=np.floor(len(time)/daylength)
    while (i < imax+1):
        i=i+1
        dayslab=[int((i-1)*daylength),int(daylength*i)]
        if i==imax+1: dayslab=[int((i-1)*daylength),len(time)]
        fmerged_slab=fmerged+'0'+str(gid)+'_'+time_to_date(mmm_date[idx+i-1],lt_start)
        err=check_file(fmerged_slab)
        year, month, day, hour, minute, second = date_to_time(mmm_date[idx+i-1],lt_start)
        if int(day) in days:
           print '  -->> hyperslab : ',dayslab,' => mramsout_d0'+str(gid)+'_'+time_to_date(mmm_date[idx+i-1],lt_start)
        else:
           err=0
           print '  -->> hyperslab : ',dayslab,' => mramsout_d0'+str(gid)+'_'+time_to_date(mmm_date[idx+i-1],lt_start)+' --> Skipped'
        if err == 1:
            # Prepare merged file
            subprocess.call(['touch',fmerged_slab],shell=False)
            subprocess.call(['rm','-rf',fmerged_slab],shell=False)
            merged_file = Dataset(fmerged_slab, 'w', format='NETCDF3_64BIT')
            merged_file.description = 'MRAMS merged atm and sfc converted (netcdf) file.'
            ## Dimensions
            merged_file.createDimension('Time', None)
            merged_file.createDimension('bottom_top',nlev)
            merged_file.createDimension('south_north',nlat)
            merged_file.createDimension('west_east',nlon)
            merged_file.createDimension('bottom_top_stag',nlev+1) # very important for PHTOT in API
#            merged_file.createDimension('south_north_stag',nlat+1)
#            merged_file.createDimension('west_east_stag',nlon+1)
            ## Variables for dimensions
            times = merged_file.createVariable('Time', 'f', ('Time',))
            times[:] = time[dayslab[0]:dayslab[1]]
            altitudes = merged_file.createVariable('bottom_top', 'f', ('bottom_top',))
            altitudes[:] = lev[:]
            latitudes = merged_file.createVariable('south_north', 'f', ('south_north',))
            latitudes[:] = lat[:]
            longitudes = merged_file.createVariable('west_east', 'f', ('west_east',))
            longitudes[:] = lon[:]
            ## Set attributes
            setattr(merged_file, 'TITLE', 'OUTPUT FROM MRAMS MODEL')
            setattr(merged_file, 'START_DATE', time_to_date(mmm_date[idx],lt_start))
            setattr(merged_file, 'SIMULATION_START_DATE', time_to_date(mmm_date[idx],lt_start))
            setattr(merged_file, 'WEST-EAST_GRID_DIMENSION', nlon+1)
            setattr(merged_file, 'SOUTH-NORTH_GRID_DIMENSION', nlat+1)
            setattr(merged_file, 'BOTTOM-TOP_GRID_DIMENSION', nlev+1)
            setattr(merged_file, 'DX', np.float(dx[gid-1]))
            setattr(merged_file, 'DY', np.float(dy[gid-1]))
            setattr(merged_file, 'DT', np.float(dt))
            setattr(merged_file, 'GRID_TYPE', 'C') #
            setattr(merged_file, 'GRID_ID', gid)
            setattr(merged_file, 'PARENT_ID', gid-1)
            setattr(merged_file, 'MAP_PROJ', 2) #
            setattr(merged_file, 'JULYR', year)
            setattr(merged_file, 'JULDAY', gcm_sol[idx+i-1])
            setattr(merged_file, 'WEST-EAST_PATCH_START_UNSTAG' , 1)
            setattr(merged_file, 'WEST-EAST_PATCH_END_UNSTAG' , nlon)
            setattr(merged_file, 'WEST-EAST_PATCH_START_STAG' , 1)
            setattr(merged_file, 'WEST-EAST_PATCH_END_STAG' , nlon) ###
            setattr(merged_file, 'SOUTH-NORTH_PATCH_START_UNSTAG' , 1)
            setattr(merged_file, 'SOUTH-NORTH_PATCH_END_UNSTAG' , nlat)
            setattr(merged_file, 'SOUTH-NORTH_PATCH_START_STAG' , 1)
            setattr(merged_file, 'SOUTH-NORTH_PATCH_END_STAG' , nlat) ###
            setattr(merged_file, 'BOTTOM-TOP_PATCH_START_UNSTAG' , 1)
            setattr(merged_file, 'BOTTOM-TOP_PATCH_END_UNSTAG' , nlev)
            setattr(merged_file, 'BOTTOM-TOP_PATCH_START_STAG' , 1)
            setattr(merged_file, 'BOTTOM-TOP_PATCH_END_STAG' , nlev+1)
            ## Set grid variables
            xlon = merged_file.createVariable('XLONG', 'f', ('Time','south_north','west_east',))
            xlat = merged_file.createVariable('XLAT', 'f', ('Time','south_north','west_east',))
            print '    glon -> XLONG' ; xlon[:,:,:] = sfc.variables['glon'][dayslab[0]:dayslab[1],:,:]
            print '    glat -> XLAT' ; xlat[:,:,:] = sfc.variables['glat'][dayslab[0]:dayslab[1],:,:]
            cen_lon, cen_lat = find_center(lon,lat,xlon[0,:,:],xlat[0,:,:])
            setattr(merged_file, 'CEN_LAT', cen_lat)
            setattr(merged_file, 'CEN_LON', cen_lon)
            ## Renaming
            print '    topo -> HGT' ; hgt = merged_file.createVariable('HGT', 'f', ('Time','south_north','west_east',))
            hgt[:,:,:] = sfc.variables['topo'][dayslab[0]:dayslab[1],:,:]
            print '    tempk -> tk' ; tk = merged_file.createVariable('tk', 'f', ('Time','bottom_top','south_north','west_east',))
            tk[:,:,:,:] = atm.variables['tempk'][dayslab[0]:dayslab[1],:,:,:]
            print '    press -> PTOT' ; ptot = merged_file.createVariable('PTOT', 'f', ('Time','bottom_top','south_north','west_east',))
            ptot[:,:,:,:] = atm.variables['press'][dayslab[0]:dayslab[1],:,:,:]
            u = merged_file.createVariable('U', 'f', ('Time','bottom_top','south_north','west_east',))
            v = merged_file.createVariable('V', 'f', ('Time','bottom_top','south_north','west_east',))
            w = merged_file.createVariable('W', 'f', ('Time','bottom_top','south_north','west_east',))
            print '    u_avg -> U' ; u[:,:,:,:] = atm.variables['u_avg'][dayslab[0]:dayslab[1],:,:,:]
            print '    v_avg -> V' ; v[:,:,:,:] = atm.variables['v_avg'][dayslab[0]:dayslab[1],:,:,:]
            print '    w_avg -> W' ; w[:,:,:,:] = atm.variables['w_avg'][dayslab[0]:dayslab[1],:,:,:]
            tke = merged_file.createVariable('TKE', 'f', ('Time','bottom_top','south_north','west_east',))
            print '    sgs_tke -> TKE' ; tke[:,:,:,:] = atm.variables['sgs_tke'][dayslab[0]:dayslab[1],:,:,:]
            tau_dust = merged_file.createVariable('TAU_DUST', 'f', ('Time','south_north','west_east',))
            print '    dust_sfc_od_vis -> TAU_DUST' ; tau_dust[:,:,:] = atm.variables['dust_sfc_od_vis'][dayslab[0]:dayslab[1],:,:]
            z_lyrmid_agl = merged_file.createVariable('z_lyrmid_agl', 'f', ('Time','bottom_top','south_north','west_east',))
            print '    z_lyrmid_agl -> z_lyrmid_agl' ; z_lyrmid_agl[:,:,:,:] = atm.variables['z_lyrmid_agl'][dayslab[0]:dayslab[1],:,:,:]
            # Compute PHTOT for API
            print '    Computing PHTOT'
            phtot = merged_file.createVariable('PHTOT', 'f', ('Time','bottom_top_stag','south_north','west_east',))
            phtot[:,0:nlev,:,:]=z_lyrmid_agl[:,:,:,:]*grav ; phtot[:,nlev,:,:]=0.
            for l in np.arange(nlev):
                phtot[:,l,:,:]=phtot[:,l,:,:] + grav*hgt[:,:,:]

            merged_file.close()

def date_to_time(date,ltstart=None):
    year=date[0:4]
    month=date[5:7]
    day=date[8:10]
    hour=date[11:13]
    minute=date[14:16]
    second=date[17:19]
    if ltstart is not None:
       hour=ltstart[0:2]
       minute=ltstart[3:5]
       second=ltstart[6:8]
    return year,month,day,hour,minute,second

def time_to_date(date,ltstart=None):
    if ltstart is not None: return date[0:11]+ltstart
    else: return date