- Timestamp: Oct 28, 2011, 6:08:58 PM (14 years ago)
- Location: trunk
- Files: 6 edited

Legend:
- Unmodified
- Added (+)
- Removed (-)
trunk/MESOSCALE/LMD_MM_MARS/SRC/WRFV2/mars_lmd/makegcm_mpi (r138 → r335)

  179 179       set optim90=" -O2 -I. -Munroll -Mcache_align"
  180 180       set optimtru90=" -O2 -I. -Munroll -Mcache_align"
      181 +     #set optim90="-O3 -I. -tp=nehalem-64 -fastsse -Mscalarsse -Mvect=sse -Mipa=fast"
      182 +     #set optimtru90="-O3 -I. -tp=nehalem-64 -fastsse -Mscalarsse -Mvect=sse -Mipa=fast"
  181 183       set opt_link=" -L$NCDFLIB -lnetcdf -Bstatic"
  182 184       set mod_loc_dir=""
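For readers comparing the active and commented flag sets above, a hedged summary of what each PGI flag does (exact behavior depends on the PGI release), written as csh comments in the style of the makegcm scripts:

  # conservative set kept active in r335
  #   -O2            : standard optimization level
  #   -Munroll       : unroll loops
  #   -Mcache_align  : align large data objects on cache-line boundaries
  # aggressive set left commented out for later experiments
  #   -O3 -fastsse   : higher optimization incl. SSE-based fast code paths
  #   -Mscalarsse / -Mvect=sse : scalar and vector SSE code generation
  #   -Mipa=fast     : interprocedural analysis
  #   -tp=nehalem-64 : tune for 64-bit Intel Nehalem processors
  set optim90=" -O2 -I. -Munroll -Mcache_align"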
trunk/MESOSCALE/LMD_MM_MARS/SRC/WRFV2/mars_lmd/makegcm_pgf (r69 → r335)

  167 167       else if $DEC then
  168 168       else if $LINUX then
  169     -     ##-Ktrap=fp
  170     -     set optim90=" -fast -I. -Mlarge_arrays -mcmodel=medium"
  171     -     ## newmodif 01042009
  172     -     set optimtru90=" -fast -c -Mfree -Mlarge_arrays -mcmodel=medium"
  173     -     # set opt_link=" -Mfree -L/usr/local/pgi/linux86/lib -lpgf90 -lpgftnrtl -lpghpf -lpghpf2 -L$NCDFLIB -lnetcdf -Wl,-Bstatic"
  174     -     set opt_link=" -Mlarge_arrays -mcmodel=medium -Mfree -L/usr/local/pgi/linux86/lib -lpgf90 -lpgftnrtl -lpghpf -lpghpf2 -L$NCDFLIB -lnetcdf"
      169 +     ###-Ktrap=fp
      170 +     # set optim90=" -fast -I. -Mlarge_arrays -mcmodel=medium"
      171 +     ### newmodif 01042009
      172 +     # set optimtru90=" -fast -c -Mfree -Mlarge_arrays -mcmodel=medium"
      173 +     ## set opt_link=" -Mfree -L/usr/local/pgi/linux86/lib -lpgf90 -lpgftnrtl -lpghpf -lpghpf2 -L$NCDFLIB -lnetcdf -Wl,-Bstatic"
      174 +     # set opt_link=" -Mlarge_arrays -mcmodel=medium -Mfree -L/usr/local/pgi/linux86/lib -lpgf90 -lpgftnrtl -lpghpf -lpghpf2 -L$NCDFLIB -lnetcdf"
      175 +     set optim90=" -O2 -I. -Munroll -Mcache_align"
      176 +     set optimtru90=" -O2 -I. -Munroll -Mcache_align"
      177 +     #set optim90="-O3 -I. -tp=nehalem-64 -fastsse -Mscalarsse -Mvect=sse -Mipa=fast"
      178 +     #set optimtru90="-O3 -I. -tp=nehalem-64 -fastsse -Mscalarsse -Mvect=sse -Mipa=fast"
      179 +     set opt_link=" -L$NCDFLIB -lnetcdf -Bstatic"
  175 180       set mod_loc_dir=""
  176 181       set mod_suffix=mod
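The new link line trades the large-memory flags for a static netCDF link (-Bstatic). To verify after a build that the library really was folded into the executable, a quick check along these lines can help — a sketch only; "gcm.e" as the executable name is an assumption, substitute the actual makegcm output:

  # hypothetical post-build check; adjust the executable name as needed
  ldd gcm.e | grep -i netcdf     # prints nothing if netcdf was linked statically
  nm gcm.e | grep -c nf_open     # netcdf Fortran symbols live in the binary when static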
trunk/MESOSCALE/LMD_MM_MARS/SRC/WRFV2/mars_lmd_new/makegcm_pgf (r86 → r335)

  221 221       set nomlib=${machine}64
  222 222       ## Aymeric: added handling of large arrays and large memory
      223 +     ## BUT BEWARE: THERE IS NO MESOSCALE HERE !!!!
  223 224       set optim90=" -fast -I. -Mlarge_arrays -mcmodel=medium"
  224 225       set optimtru90=" -fast -c -Mfree -Mlarge_arrays -mcmodel=medium"
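A note on the two flags kept in this file, as a hedged sketch of the constraint they impose: -mcmodel=medium selects the medium code model so that static data may exceed 2 GB, and -Mlarge_arrays allows indexing single arrays beyond that limit; both must then be applied consistently at compile and link time.

  # every object and the final link must agree on the code model;
  # mixing medium-model objects with default-model libraries typically
  # fails at link time with "relocation truncated to fit" style errors
  set optim90=" -fast -I. -Mlarge_arrays -mcmodel=medium"
  set opt_link=" -Mlarge_arrays -mcmodel=medium -L$NCDFLIB -lnetcdf"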
trunk/MESOSCALE/LMD_MM_MARS/makemeso (r333 → r335)

  453 453       sed s+"mpif90"+"${WHERE_MPI}/mpif90"+g configure.wrf | sed s+"mpicc"+"${WHERE_MPI}/mpicc"+g | sed s+"mpicc"+"mpicc -DMPI2_SUPPORT"+g > yeah
  454 454       #| sed s+"fastsse"+"fast"+g > yeah
  455     -     mv -f yeah configure.wrf ;;
  456     -     #sed s+"-fastsse"+"-O2 -Munroll -Mcache_align"+g configure.wrf > yeah
  457     -     ##sed s+"-fastsse"+"-O2 -fpic"+g configure.wrf > yeah ## does not work, ILM problem
  458     -     ##sed s+"-fastsse"+"-mcmodel=medium -Mlarge_arrays"+g configure.wrf > yeah ## does not work, ILM problem
  459     -     #mv -f yeah configure.wrf ;;
      455 +     #mv -f yeah configure.wrf #;;
      456 +     # #sed s+"-fastsse"+"-O2 -Munroll -Mcache_align"+g configure.wrf > yeah
      457 +     # ##sed s+"-fastsse"+"-O2 -fpic"+g configure.wrf > yeah ## does not work, ILM problem
      458 +     # ##sed s+"-fastsse"+"-mcmodel=medium -Mlarge_arrays"+g configure.wrf > yeah ## does not work, ILM problem
      459 +     #sed s+"-fastsse"+"-O3 -tp=nehalem-64 -fastsse -Mscalarsse -Mvect=sse -Mipa=fast"+g configure.wrf > yeah
      460 +     #sed s+"-fastsse"+"-O2 -Munroll -Mcache_align"+g configure.wrf > yeah
      461 +     mv -f yeah configure.wrf ;;
  460 462       # MPICH, 64 bits, OK with periodic BC but no positive definite
  461 463       mpi_64_test) cd arch ; ln -sf ../configure.defaults.fix64nest configure.defaults ; cd ..
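The sed chain above relies on the fact that sed accepts any character as the s-command delimiter; using "+" spares escaping the "/" in substituted paths such as ${WHERE_MPI}/mpif90. A minimal illustration (the /opt/mpich path is made up for the example):

  echo "FC = mpif90" | sed s+"mpif90"+"/opt/mpich/bin/mpif90"+g
  # -> FC = /opt/mpich/bin/mpif90
  # same effect with the default delimiter, but every "/" must be escaped:
  echo "FC = mpif90" | sed 's/mpif90/\/opt\/mpich\/bin\/mpif90/g'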
trunk/MESOSCALE_DEV/BENCH (r326 → r335)

  1       -     [NB: FASTCASEnewphys no longer works...]
  2       -
  3   1         **** TESTCASE from the USER MANUAL ****
      2   +     LMD_MM_MARS with the old physics
  4   3         run on 4 processors
  5   4         same settings and boundary conditions
  …   …
  51  50        Timing for main: time 2024-01-17_03:05:00 on domain 1:    0.25855 elapsed seconds.
  52  51
      52  +     **** Arnaud's options
      53  +
      54  +     Timing for Writing wrfout_d01_2024-01-17_03:00:00 for domain 1:    0.02100 elapsed seconds.
      55  +     Timing for processing lateral boundary for domain 1:    0.00592 elapsed seconds.
      56  +     Timing for main: time 2024-01-17_03:00:50 on domain 1:    0.40633 elapsed seconds.
      57  +     Timing for main: time 2024-01-17_03:01:00 on domain 1:    0.29322 elapsed seconds.
      58  +     Timing for main: time 2024-01-17_03:01:50 on domain 1:    0.35327 elapsed seconds.
      59  +     Timing for main: time 2024-01-17_03:02:00 on domain 1:    0.28635 elapsed seconds.
      60  +     Timing for main: time 2024-01-17_03:02:50 on domain 1:    0.36557 elapsed seconds.
      61  +     Timing for main: time 2024-01-17_03:03:00 on domain 1:    0.29840 elapsed seconds.
      62  +     Timing for main: time 2024-01-17_03:03:50 on domain 1:    0.36257 elapsed seconds.
      63  +     Timing for main: time 2024-01-17_03:04:00 on domain 1:    0.29930 elapsed seconds.
      64  +     Timing for main: time 2024-01-17_03:04:50 on domain 1:    0.35682 elapsed seconds.
      65  +     Timing for main: time 2024-01-17_03:05:00 on domain 1:    0.30329 elapsed seconds.
      66  +
      67  +
  53  68        **** UPMC FARM [gnome iDataPlex] + IFORT
  54  69        *****************************************
  …   …
  66  81        Timing for main: time 2024-01-17_03:05:00 on domain 1:    0.25240 elapsed seconds.
  67  82
      83  +     **** PENN [new compute farm] + PGF90 [MPI on 1 proc only]
      84  +     ***********************************************************************
      85  +     Timing for Writing wrfout_d01_2024-01-17_03:00:00 for domain 1:    0.01476 elapsed seconds.
      86  +     Timing for processing lateral boundary for domain 1:    0.00443 elapsed seconds.
      87  +     Timing for main: time 2024-01-17_03:00:50 on domain 1:    1.14924 elapsed seconds.
      88  +     Timing for main: time 2024-01-17_03:01:00 on domain 1:    0.66135 elapsed seconds.
      89  +     Timing for main: time 2024-01-17_03:01:50 on domain 1:    1.14865 elapsed seconds.
      90  +     Timing for main: time 2024-01-17_03:02:00 on domain 1:    0.70446 elapsed seconds.
      91  +     Timing for main: time 2024-01-17_03:02:50 on domain 1:    1.16477 elapsed seconds.
      92  +     Timing for main: time 2024-01-17_03:03:00 on domain 1:    0.68162 elapsed seconds.
      93  +     Timing for main: time 2024-01-17_03:03:50 on domain 1:    1.13741 elapsed seconds.
      94  +     Timing for main: time 2024-01-17_03:04:00 on domain 1:    0.66107 elapsed seconds.
      95  +     Timing for main: time 2024-01-17_03:04:50 on domain 1:    1.14676 elapsed seconds.
      96  +     Timing for main: time 2024-01-17_03:05:00 on domain 1:    0.69579 elapsed seconds.
      97  +
      98  +     **** ULRICH [Aymeric's local PowerEdge] + PGF90 [PGF single proc]
      99  +     ***************************************************************
      100 +     Timing for Writing wrfout_d01_2024-01-17_03:00:00 for domain 1:    0.01468 elapsed seconds.
      101 +     Timing for processing lateral boundary for domain 1:    0.00421 elapsed seconds.
      102 +     Timing for main: time 2024-01-17_03:00:50 on domain 1:    1.10895 elapsed seconds.
      103 +     Timing for main: time 2024-01-17_03:01:00 on domain 1:    0.64522 elapsed seconds.
      104 +     Timing for main: time 2024-01-17_03:01:50 on domain 1:    1.09015 elapsed seconds.
      105 +     Timing for main: time 2024-01-17_03:02:00 on domain 1:    0.64544 elapsed seconds.
      106 +     Timing for main: time 2024-01-17_03:02:50 on domain 1:    1.08960 elapsed seconds.
      107 +     Timing for main: time 2024-01-17_03:03:00 on domain 1:    0.64575 elapsed seconds.
      108 +     Timing for main: time 2024-01-17_03:03:50 on domain 1:    1.08922 elapsed seconds.
      109 +     Timing for main: time 2024-01-17_03:04:00 on domain 1:    0.64583 elapsed seconds.
      110 +     Timing for main: time 2024-01-17_03:04:50 on domain 1:    1.09003 elapsed seconds.
      111 +     Timing for main: time 2024-01-17_03:05:00 on domain 1:    0.64583 elapsed seconds.
      112 +
      113 +
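To turn logs like these into a single comparable number per machine, averaging the "Timing for main" lines is enough. A hedged sketch; rsl.out.0000 is the usual WRF run-log name and is an assumption here:

  # mean time-step cost over all "Timing for main" lines of one run log
  awk '/Timing for main/ { s += $(NF-2); n++ }
       END { if (n) printf "%d steps, mean %.5f s/step\n", n, s/n }' rsl.out.0000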
trunk/MESOSCALE_DEV/NOTES.txt (r331 → r335)

      1   +     NEW COMPUTE FARM
      2   +     - OK with pgf_64_single
      3   +     - OK with mpi_64 on 1 proc
      4   +     - not OK with mpi_64 on 4 procs
      5   +       the Hellas case crashes between output 9 and 10
      6   +       .... fastsse or not changes nothing [mpich fast in both cases]
      7   +       .... changing radt only postpones the crash
      8   +       .... lowering the time step triggers the problem earlier
      9   +       .... matching the WRF options to the LMD physics options does not work either
      10  +       .... with fastsse everywhere (even MPI), crash during output 2
      11  +       .... stock SVN options with mpich without fast: still fails, crashes between 9 and 10
      12  +
      13  +
      14  +     ---------------------------------------------------------------------------------
      15  +
  1   16        --- on the new machines, problems with parallel runs using the new physics
  2   17
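Since the crash point moves with optimization level and time step (a classic signature of a floating-point or memory fault), one low-risk next step is the -Ktrap=fp flag already present, commented out, in the makegcm scripts. A hedged debugging sketch, not the committed settings:

  # debug build: no optimization, symbols, trap FP exceptions at the faulty line
  set optim90=" -O0 -g -Ktrap=fp -I."
  set optimtru90=" -O0 -g -Ktrap=fp -I."
  # rerun the Hellas case: with traps armed, the first invalid operation,
  # divide-by-zero or overflow aborts immediately instead of silently
  # corrupting fields that only crash the run several outputs later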