Changeset 3777 for trunk/LMDZ.COMMON/libf
- Timestamp: May 23, 2025, 4:51:04 PM
- Location: trunk/LMDZ.COMMON/libf/evolution
- Files: 3 edited
trunk/LMDZ.COMMON/libf/evolution/changelog.txt
r3771 → r3777

New entry appended after the existing one of 21/05/2025:

== 21/05/2025 == JBC
Following r3770, adaptation and improvements of Python scripts to visualize the layered deposits.

== 23/05/2025 == JBC
Big improvements of Python scripts to output the stratifications (user-friendly prompt, error checks, code modularity, nice visualization, etc).
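The "user-friendly prompt" mentioned in the new entry comes down, in the rewritten scripts, to an input() call whose empty answer falls back to a default with the or operator, wrapped in a validation loop. A minimal sketch of that pattern, using a hypothetical helper name not taken from the scripts:

import os

def prompt_with_default(prompt, default, validate=None):
    """Ask for a value, fall back to `default` on empty input and
    re-prompt until `validate` (if given) accepts the answer."""
    while True:
        answer = input(f"{prompt} (press Enter for default [{default}]): ").strip() or default
        if validate is None or validate(answer):
            return answer
        print(f' » "{answer}" is not valid. Please try again.')

# Example: pick an existing folder, defaulting to "starts"
# folder_path = prompt_with_default("Enter the folder path containing the NetCDF files",
#                                   "starts", validate=os.path.isdir)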
trunk/LMDZ.COMMON/libf/evolution/deftank/visu_evol_layering.py
r3771 → r3777

The script was rewritten from a single procedural flow into a modular program: a #!/usr/bin/env python3 shebang was added, the header comment now refers to the "restartpem#.nc" files, and the previously hard-coded base elevation of the deepest subsurface layer is now derived from the files themselves. The new version is organised into the following functions:

- get_user_inputs(): prompts for the folder containing the NetCDF files (default "starts"), the base file name (default "restartpem") and the PEM info file (default "info_PEM.txt"), re-prompting until the folder and the info file exist.
- list_netcdf_files(): collects all files matching {base_name}#.nc with glob and sorts them by their numeric index.
- open_sample_dataset(): reads ngrid, nslope, longitude and latitude from the first file.
- collect_stratification_variables(): scans every file to gather the stratification variable names per property (top elevation, CO2 ice, H2O ice, dust, pore, pore ice), the maximum number of strata, and the global minimum base and maximum top elevations.
- load_full_datasets() and extract_stratification_data(): open all files and build the per grid point / time step / slope / stratum arrays of top elevations and property values, replacing the six hand-written nested loops of the previous version.
- normalize_to_fractions(): converts the CO2 ice, H2O ice, dust and pore thicknesses into volume fractions that sum to 1 within each stratum.
- read_infofile(): returns the timestamps of "info_PEM.txt" as a numpy array, now skipping empty or malformed lines.
- get_yes_no_input() and prompt_discretization_step(): a yes/no prompt deciding whether subsurface layers are shown, and a validated prompt for the discretization step dz (0 < dz <= maximum top elevation).
- interpolate_data_on_refgrid(): builds the reference elevation grid and interpolates the strata fractions onto it with a step-wise interp1d(kind='next') and a fill value of -1, returning in addition a top_index array giving, for each grid point, time step and slope, the number of grid levels covered by the topmost stratum; with the surface-only option, strata below 0 m are ignored.
- plot_stratification_over_time(): for each grid point and slope, draws a 2x2 figure (CO2 ice, H2O ice, dust, pore) with a 'hot_r' colormap, masks fill values and everything above the topmost stratum, overlays violet segments at the stratum top elevations over the time intervals where they exist, adds a single shared colorbar, and saves the figure as layering_evolution_ig{ig}_is{isl}.png.
- main(): chains the steps above, warns if the number of timestamps in "info_PEM.txt" differs from the number of NetCDF files, and is protected by an if __name__ == "__main__" guard.
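The core of the new interpolation step is a step-wise regridding of the per-stratum fractions onto a regular elevation grid with scipy's interp1d(kind='next'), leaving every level above the topmost stratum at the fill value so it can be masked in the plots. A minimal single-column sketch under those assumptions (the helper name and the numbers in the usage comment are illustrative, not taken from the script):

import numpy as np
from scipy.interpolate import interp1d

def regrid_column(top_elev, frac, dz, zmin, zmax):
    """Interpolate per-stratum fractions onto a regular elevation grid.

    top_elev : 1D array of stratum top elevations [m], increasing upward
    frac     : 1D array of the corresponding volume fractions (0..1)
    Returns (ref_grid, gridded); levels above the topmost stratum keep
    the fill value -1 so they can be masked (shown white) when plotting.
    """
    ref_grid = np.arange(zmin, zmax + dz / 2, dz)
    gridded = np.full(ref_grid.shape, -1.0)

    # Step-wise ("next") interpolation: each level takes the value of the
    # first stratum whose top elevation lies at or above that level.
    f_interp = interp1d(top_elev, frac, kind='next',
                        bounds_error=False, fill_value=-1.0)

    # Fill only the levels up to the topmost stratum; the rest stays at -1.
    i_zmax = np.searchsorted(ref_grid, top_elev.max(), side='right')
    gridded[:i_zmax] = f_interp(ref_grid[:i_zmax])
    return ref_grid, gridded

# Illustrative column: three strata with CO2-ice fractions 0.2, 0.6 and 0.1
# z, co2 = regrid_column(np.array([-2.0, -0.5, 1.0]),
#                        np.array([0.2, 0.6, 0.1]), dz=0.25, zmin=-3.0, zmax=2.0)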
trunk/LMDZ.COMMON/libf/evolution/deftank/visu_layering.py
r3771 → r3777

A #!/usr/bin/env python3 shebang was added and the header comment block now refers to the "restartpem#.nc" files instead of "startpem.nc"; the only other change visible in the diff is a trailing blank line appended after the if __name__ == '__main__': main() guard at the end of the file.
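Both visualization scripts now work on the numbered "restartpem#.nc" files; a condensed sketch of the discovery-and-numeric-sort step that the rewritten visu_evol_layering.py performs in list_netcdf_files() (the helper name below is illustrative):

import os
from glob import glob

def sorted_restart_files(folder, base_name="restartpem"):
    """Return the {base_name}#.nc files of `folder` sorted by their number,
    so that e.g. restartpem10.nc comes after restartpem9.nc."""
    def index_of(path):
        stem = os.path.basename(path)[len(base_name):-3]   # strip base name and ".nc"
        return int(stem) if stem.isdigit() else float('inf')
    return sorted(glob(os.path.join(folder, f"{base_name}[0-9]*.nc")), key=index_of)

# files = sorted_restart_files("starts")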