- Timestamp: Apr 22, 2016, 9:02:11 AM
- Location: trunk
- Files: 15 added, 9 deleted, 92 edited
trunk/DOC/chantiers/commit_importants.log
r1540 -> r1543: a new entry is appended after the previous one (which ends with: moved "iniprint.h" from dyn3d_common to misc (NB: it is used in both physics and dynamics; this should be cleaned up further...)):

**********************
**** commit_v1543 ****
**********************
Ehouarn: Further adaptations to keep up with changes in LMDZ5 concerning
physics/dynamics separation:
* dyn3d:
  - adapted gcm.F so that all physics initializations are now done in iniphysiq.

* dyn3dpar:
  - adapted gcm.F so that all physics initializations are now done in iniphysiq.
  - updated calfis_p.F to follow up with changes.
  - copied over updated "bands.F90" from LMDZ5.

* dynphy_lonlat:
  - calfis_p.F90, mod_interface_dyn_phys.F90: follow-up of changes in
    phy_common/mod_* routines.

* phy_common:
  - added "geometry_mod.F90" to store information about the grid (replaces
    phy*/comgeomphy.F90) and give variables friendlier names: rlond => longitude,
    rlatd => latitude, airephy => cell_area, cuphy => dx, cvphy => dy.
  - added "physics_distribution_mod.F90".
  - updated "mod_grid_phy_lmdz.F90", "mod_phys_lmdz_mpi_data.F90",
    "mod_phys_lmdz_para.F90", "mod_phys_lmdz_mpi_transfert.F90",
    "mod_phys_lmdz_omp_data.F90", "mod_phys_lmdz_omp_transfert.F90",
    "write_field_phy.F90" and "ioipsl_getin_p_mod.F90" to LMDZ5 versions.

* phy[venus/titan/mars/std]:
  - removed "init_phys_lmdz.F90" and "comgeomphy.F90"; adapted routines to use
    geometry_mod (longitude, latitude, cell_area, etc.).
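As an illustration of the variable renaming described above, a physics routine that previously took its grid description from comgeomphy now gets the same quantities from geometry_mod; a minimal before/after sketch (only the variable names come from the log above, the USE statements themselves are illustrative):

    ! Before (phy*/comgeomphy.F90):
    USE comgeomphy, ONLY: rlond, rlatd, airephy, cuphy, cvphy
    ! After (phy_common/geometry_mod.F90), same quantities, friendlier names:
    USE geometry_mod, ONLY: longitude, latitude, cell_area, dx, dy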
trunk/LMDZ.COMMON/libf/dyn3d/gcm.F90
r1523 -> r1543:
- Added "USE mod_const_mpi, ONLY: COMM_LMDZ".
- Removed the INCA-only block (#ifdef INCA: USE indice_sol_mod; USE mod_phys_lmdz_para, ONLY: klon_mpi_para_nb).
- Commented out the CPP_PHYS block that called init_phys_lmdz() (physics initialization is now done entirely in iniphysiq).
- Removed the two "IF (type_trac == 'inca')" initialization blocks (init_const_lmdz / init_inca_para and init_inca_dim).
- The call to iniphysiq() now also passes (jjm-1)*iim+2 (the number of physics columns) and the MPI communicator comm_lmdz ahead of the previous arguments (daysec, day_ini, dtphys/nsplit_phys, grid arrays, constants, iflag_phys).
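For reference, the (jjm-1)*iim+2 passed as the new column-count argument is the size of the global physics grid in the serial case: one point per pole plus iim points on each of the jjm-1 remaining latitude rows. A small stand-alone sketch of that arithmetic (the 64x48 resolution is an arbitrary example, not taken from the changeset):

    program count_phys_columns
      implicit none
      integer, parameter :: iim = 64, jjm = 48   ! hypothetical dynamics grid
      integer :: nbp
      ! physics grid: 2 single-point poles + iim points on each interior row
      nbp = (jjm-1)*iim + 2
      print *, 'physics columns (klon_glo) =', nbp   ! 3010 for this example
    end program count_phys_columns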
trunk/LMDZ.COMMON/libf/dyn3dpar/bands.F90
r1019 -> r1543 (updated to the LMDZ5 version; $Id$ now revision 2351, 2015-08-25, emillour):
- "use parallel_lmdz" statements written as "USE parallel_lmdz" throughout (no functional change).
- Set_Bands no longer imports jj_para_begin/jj_para_end from mod_phys_lmdz_para under #ifdef CPP_PHYS; it now declares local jj_para_begin(0:mpi_size-1) and jj_para_end(0:mpi_size-1) arrays (plus a work index ij) and computes them directly from distrib_phys(), so the jj_Nb_physic distribution is built whether or not the physics is compiled in.
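In short, each rank's latitude bounds are now recovered from its physics column count; a cleaned-up sketch of the new index arithmetic (mirroring the code added in this diff, with distrib_phys(i) holding the number of physics columns owned by MPI rank i):

    jj_para_begin(0) = 1
    ij = distrib_phys(0) + iim - 1          ! end index of rank 0 (pole counted as a full row)
    jj_para_end(0) = ((ij-1)/iim) + 1       ! convert the 1D index to its latitude row
    do i = 1, mpi_size-1
      ij = ij + 1                           ! first point of rank i
      jj_para_begin(i) = ((ij-1)/iim) + 1
      ij = ij + distrib_phys(i) - 1         ! last point of rank i
      jj_para_end(i) = ((ij-1)/iim) + 1
    end do
    do i = 0, mpi_size-1
      jj_Nb_physic(i) = jj_para_end(i) - jj_para_begin(i) + 1   ! latitude rows per rank
    end do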
trunk/LMDZ.COMMON/libf/dyn3dpar/gcm.F
r1523 -> r1543:
- The CPP_PHYS-guarded "USE mod_interface_dyn_phys" and the calls to init_phys_lmdz() and Init_interface_dyn_phys are commented out (now handled inside iniphysiq), and "USE comgeomphy" is removed.
- The call to iniphysiq() now also passes distrib_phys(mpi_rank) (the number of physics columns handled by this MPI process) and comm_lmdz ahead of the previous arguments.
trunk/LMDZ.COMMON/libf/dynphy_lonlat/calfis_p.F
r1459 -> r1543:
- "USE mod_phys_lmdz_para, mpi_root_xx=>mpi_root" is replaced by "USE mod_phys_lmdz_mpi_data, mpi_root_xx=>mpi_master".
- Added "USE mod_phys_lmdz_omp_data, ONLY: klon_omp, klon_omp_begin" and "USE mod_const_mpi, ONLY: COMM_LMDZ".
trunk/LMDZ.COMMON/libf/dynphy_lonlat/mod_interface_dyn_phys.F90
r1403 -> r1543 ($Id$ now revision 2351, 2015-08-25, emillour):
- The preprocessor guard moves inside the module body and changes from CPP_PHYS to CPP_PARA: the parallel Init_interface_dyn_phys is compiled when CPP_PARA is set, and a dummy Init_interface_dyn_phys is now provided for the sequential case.
- The ONLY list on "USE mod_phys_lmdz_mpi_data" (klon_mpi, is_north_pole, is_south_pole, ii_begin, jj_begin, ii_end, jj_end) is dropped in favour of a plain USE.
trunk/LMDZ.COMMON/libf/dynphy_lonlat/phytitan/iniphysiq_mod.F90
r1525 -> r1543: iniphysiq() is reworked so that it performs the whole physics initialization itself (this diff is essentially the same for the Titan, Venus and generic versions of iniphysiq_mod.F90):
- New dummy arguments nbp (number of physics columns for this MPI process) and communicator (MPI communicator).
- Uses init_dimphy (dimphy), klon_glo/regular_lonlat/nbp_lon/nbp_lat/nbp_lev (mod_grid_phy_lmdz), init_geometry (geometry_mod), init_physics_distribution (physics_distribution_mod) and init_interface_dyn_phys (mod_interface_dyn_phys); the old "USE comgeomphy" block and the nlayer/=klev sanity check are removed.
- Calls init_physics_distribution(regular_lonlat,4,nbp,iim,jjm+1,nlayer,communicator) in the general 3D case (or with a 1x1 horizontal grid for the 1D model), then init_interface_dyn_phys.
- The global arrays latfi/lonfi/cufi/cvfi/airefi are renamed latfi_glo/lonfi_glo/cufi_glo/cvfi_glo/airefi_glo and complemented by boundslonfi_glo/boundslatfi_glo(klon_glo,4), which hold the longitudes/latitudes of the four corners (north_east, north_west, south_west, south_east) of every physics cell, with special handling of the two poles and of the 1D (klon_glo==1) case.
- Inside the OpenMP parallel region, THREADPRIVATE local arrays latfi/lonfi/cufi/cvfi/airefi/boundslonfi/boundslatfi of size klon_omp are allocated and filled from the *_glo arrays using the klon_mpi_begin/klon_omp_begin/klon_omp_end offsets; initcomgeomphy is no longer called, init_geometry(klon_omp,...) is called instead, and Init_dimphy(klon_omp,nlayer) now initializes the dimphy module.
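Condensed, the new initialization sequence performed by iniphysiq is as follows (names and call order as in the diff; the allocation and filling of the local grid arrays is omitted here):

    CALL init_physics_distribution(regular_lonlat, 4, nbp, iim, jjm+1, nlayer, communicator)
    CALL init_interface_dyn_phys
    !$OMP PARALLEL
    ! ... allocate and fill the klon_omp-sized lonfi/latfi/bounds/airefi/cufi/cvfi arrays ...
    CALL init_geometry(klon_omp, lonfi, latfi, boundslonfi, boundslatfi, airefi, cufi, cvfi)
    CALL init_dimphy(klon_omp, nlayer)
    !$OMP END PARALLEL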
trunk/LMDZ.COMMON/libf/dynphy_lonlat/phyvenus/iniphysiq_mod.F90
r1524 -> r1543: same rework as phytitan/iniphysiq_mod.F90 above (new nbp/communicator arguments; init_physics_distribution, init_interface_dyn_phys, init_geometry and Init_dimphy calls; *_glo global arrays plus cell-corner bounds arrays; THREADPRIVATE local arrays filled within the OpenMP region), applied to the Venus physics.
trunk/LMDZ.COMMON/libf/phy_common/ioipsl_getin_p_mod.F90
r1521 -> r1543: "use mod_phys_lmdz_para, only: bcast" is replaced by "USE mod_phys_lmdz_transfert_para, ONLY: bcast".
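For context, bcast is used here by the getin_p wrappers, which follow the usual master-reads-then-broadcast pattern; a rough sketch of that pattern (the getin call and variable names are illustrative, not part of this diff):

    IF (is_mpi_root .AND. is_omp_root) THEN
      CALL getin(targetname, val)   ! only the master process/thread reads the .def file
    ENDIF
    CALL bcast(val)                 ! all other processes/threads receive the value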
trunk/LMDZ.COMMON/libf/phy_common/mod_grid_phy_lmdz.F90
r1534 -> r1543:
- The CVS $Header$ keyword is replaced by an $Id$ keyword.
- New module parameters unstructured=0 and regular_lonlat=1, and new saved variables grid_type and nvertex; the comments on nbp_lat and klon_glo are clarified (nbp_lat == jjmp1, or 1 when running the 1D model; klon_glo is the total number of atmospheric columns).
- init_grid_phy_lmdz(iim,jjp1,llm) becomes init_grid_phy_lmdz(grid_type_,nvertex_,nbp_lon_,nbp_lat_,nbp_lev_), storing the new grid metadata; the 1D special case (klon_glo=1 when nbp_lon*nbp_lat==1) is kept.
- The comment "fonctions generiques (privees)" becomes "Generic (private) fonctions" and blank lines are added around the private generic routines (no functional change).
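A hypothetical call with the new interface, for a regular lon-lat grid with 4 vertices per cell (the 64x49x30 resolution is an assumption, and the actual call site is not part of this changeset):

    CALL init_grid_phy_lmdz(regular_lonlat, 4, 64, 49, 30)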
trunk/LMDZ.COMMON/libf/phy_common/mod_phys_lmdz_mpi_data.F90
r1521 -> r1543:
- The module-level "USE mod_const_mpi" is commented out; mpi_root is renamed mpi_master (the old declaration is kept as a comment) and a new saved integer MPI_REAL_LMDZ (set to MPI_REAL8) is added.
- init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib) becomes init_phys_lmdz_mpi_data(nbp,nbp_lon,nbp_lat,communicator): mpif.h is included under CPP_MPI, the communicator is passed in (COMM_LMDZ_PHY=communicator) instead of taken from mod_const_mpi, the 1D-model test becomes (nbp_lon==1 .AND. nbp_lat==1), and the per-process column distribution is no longer an argument but is gathered into a locally allocated distrib(0:mpi_size-1) array with MPI_ALLGATHER (distrib(:)=nbp when MPI is not used).
- Index computations now use nbp_lon instead of iim, klon_mpi_para_nb is filled from distrib(0:mpi_size-1), and print_module_data reports mpi_master instead of mpi_root (a planned switch to print_control_mod's lunout is left as a comment).
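The per-rank distribution is thus built collectively rather than passed in; a sketch of the new mechanism (mirroring the code added in this diff):

    ALLOCATE(distrib(0:mpi_size-1))
    IF (is_using_mpi) THEN
    #ifdef CPP_MPI
      ! each MPI rank contributes its own physics column count nbp
      CALL MPI_ALLGATHER(nbp,1,MPI_INTEGER,distrib,1,MPI_INTEGER,COMM_LMDZ_PHY,ierr)
    #endif
    ELSE
      distrib(:) = nbp
    ENDIF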
trunk/LMDZ.COMMON/libf/phy_common/mod_phys_lmdz_mpi_transfert.F90
r1521 -> r1543:
- All "USE mod_phys_lmdz_mpi_data, mpi_root_x=>mpi_root" statements become plain "USE mod_phys_lmdz_mpi_data", and every MPI_BCAST, MPI_SCATTERV, MPI_GATHERV and MPI_REDUCE call now uses mpi_master as the root rank instead of the renamed mpi_root_x.
- In the bcast_mpi_*gen and reduce_sum_mpi_*gen generic routines, the nb dummy argument is declared before the Var array it dimensions.
- The remaining differences are indentation/whitespace only.
trunk/LMDZ.COMMON/libf/phy_common/mod_phys_lmdz_omp_data.F90
r1521 -> r1543: the $Id$ keyword is updated to revision 2326 (2015-07-10, emillour) and the routine names Init_phys_lmdz_omp_data and Print_module_data are written with consistent capitalization (no functional change).
trunk/LMDZ.COMMON/libf/phy_common/mod_phys_lmdz_omp_transfert.F90
r1521 -> r1543:
- A new public omp_barrier subroutine (a bare !$OMP BARRIER) is added and exported alongside bcast_omp, scatter_omp, gather_omp and reduce_sum_omp.
- In several generic routines, the Nb dummy argument is declared before the Var/Buff arrays it dimensions; the remaining differences are whitespace only.
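This lets physics code synchronize OpenMP threads through the module instead of an inline directive; a trivial usage sketch (the calling context is illustrative):

    USE mod_phys_lmdz_omp_transfert, ONLY: omp_barrier
    CALL omp_barrier   ! wraps a bare !$OMP BARRIER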
trunk/LMDZ.COMMON/libf/phy_common/mod_phys_lmdz_para.F90
r1521 -> r1543: Init_phys_lmdz_para(iim,jjp1,nb_proc,distrib) becomes Init_phys_lmdz_para(nbp,nbp_lon,nbp_lat,communicator) and forwards these arguments to Init_phys_lmdz_mpi_data; routine names (Init_phys_lmdz_omp_data, Test_transfert) are capitalized consistently.
trunk/LMDZ.COMMON/libf/phy_common/write_field_phy.F90
r1523 -> r1543:
- The CVS $Header$ keyword is replaced by an $Id$ keyword (revision 2342, 2015-08-19, emillour) and a module comment is added ("Dump a field on the global (nbp_lon by nbp_lat) physics grid").
- WriteField_phy now uses explicit ONLY lists (klon_omp, is_mpi_root and Gather from mod_phys_lmdz_para; nbp_lon, nbp_lat, klon_glo and Grid1Dto2D_glo from mod_grid_phy_lmdz; WriteField from Write_Field), drops the dimensions.h/paramet.h includes, declares its arguments with INTENT(IN), removes the unused Field_tmp array, and dimensions Field_2d as (nbp_lon,nbp_lat,ll) instead of (iim,jjp1,ll).
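A hypothetical call matching the new argument declarations (the field name and array are illustrative, not taken from the changeset):

    REAL :: t_phys(klon_omp, nbp_lev)          ! some local physics field (assumption)
    CALL writefield_phy('temperature', t_phys, nbp_lev)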
trunk/LMDZ.GENERIC/README
r1542 -> r1543: a new dated entry is appended after the existing note ending with "comgeomphy.F90 instead":

== 22/04/2016 ==
- Updates and cleanup wrt dynamics/physics separation:
  Removed init_phys_lmdz.F90 and comgeomphy.F90 from phystd;
  comgeomphy is replaced by geometry_mod (located in phy_common).
  Added physics_distribution_mod.F90 in phy_common and
  mod_interface_dyn_phys.F90 in dynphy_lonlat.
  Added nrtype.F90 (contains math const. like PI, etc.) in "misc"
trunk/LMDZ.GENERIC/libf/dyn3d/gcm.F
r1523 -> r1543:
- Added "USE mod_const_mpi, ONLY: COMM_LMDZ"; the unused local arrays vnat and unat are removed.
- The block after defrun_new that called init_phys_lmdz() is removed (this is now done in iniphysiq).
- The call to iniphysiq() now also passes (jjm-1)*iim+2 (the number of physics columns) and comm_lmdz ahead of the previous arguments (the older commented-out dtphys/nsplit_phys variant is kept as a comment).
trunk/LMDZ.GENERIC/libf/dynphy_lonlat/phystd/iniphysiq_mod.F90
r1542 r1543 3 3 CONTAINS 4 4 5 subroutine iniphysiq(ii,jj,nlayer,punjours, pdayref,ptimestep, & 6 rlatudyn,rlatvdyn,rlonudyn,rlonvdyn,airedyn,cudyn,cvdyn, & 5 subroutine iniphysiq(ii,jj,nlayer, & 6 nbp, communicator, & 7 punjours, pdayref,ptimestep, & 8 rlatudyn,rlatvdyn,rlonudyn,rlonvdyn, & 9 airedyn,cudyn,cvdyn, & 7 10 prad,pg,pr,pcpp,iflag_phys) 8 11 9 use dimphy, only : klev ! number of atmospheric levels10 use mod_grid_phy_lmdz, only : klon_glo ! number of atmospheric columns11 ! (on full grid)12 use dimphy, only : init_dimphy 13 use mod_grid_phy_lmdz, only : klon_glo, & ! number of atmospheric columns (on full grid) 14 regular_lonlat ! regular longitude-latitude grid type 12 15 use mod_phys_lmdz_para, only : klon_omp, & ! number of columns (on local omp grid) 13 16 klon_omp_begin, & ! start index of local omp subgrid … … 15 18 klon_mpi_begin ! start indes of columns (on local mpi grid) 16 19 use control_mod, only: nday 17 use comgeomphy, only : initcomgeomphy, & 18 cell_area, & ! physics grid area (m2) 19 dx, & ! cu coeff. (u_covariant = cu * u) 20 dy, & ! cv coeff. (v_covariant = cv * v) 21 longitude, & ! longitudes (rad) 22 latitude ! latitudes (rad) 20 use geometry_mod, only: init_geometry, & 21 cell_area, & ! physics grid area (m2) 22 longitude, & ! longitudes (rad) 23 latitude ! latitudes (rad) 24 !use comgeomphy, only : initcomgeomphy, & 25 ! cell_area, & ! physics grid area (m2) 26 ! dx, & ! cu coeff. (u_covariant = cu * u) 27 ! dy, & ! cv coeff. (v_covariant = cv * v) 28 ! longitude, & ! longitudes (rad) 29 ! latitude ! latitudes (rad) 23 30 use surf_heat_transp_mod, only: ini_surf_heat_transp 24 31 use infotrac, only : nqtot ! number of advected tracers … … 26 33 USE comvert_mod, ONLY: ap,bp,preff 27 34 use inifis_mod, only: inifis 35 use physics_distribution_mod, only: init_physics_distribution 28 36 use regular_lonlat_mod, only: init_regular_lonlat, & 29 37 east, west, north, south, & 30 38 north_east, north_west, & 31 39 south_west, south_east 40 use mod_interface_dyn_phys, only: init_interface_dyn_phys 32 41 use ioipsl_getin_p_mod, only: getin_p 33 42 … … 47 56 integer,intent(in) :: ii ! number of atmospheric coulumns along longitudes 48 57 integer,intent(in) :: jj ! number of atompsheric columns along latitudes 58 integer,intent(in) :: nbp ! number of physics columns for this MPI process 59 integer,intent(in) :: communicator ! MPI communicator 49 60 real,intent(in) :: rlatudyn(jj+1) ! latitudes of the physics grid 50 61 real,intent(in) :: rlatvdyn(jj) ! latitude boundaries of the physics grid … … 59 70 60 71 integer :: ibegin,iend,offset 61 integer :: i,j 72 integer :: i,j,k 62 73 character(len=20) :: modname='iniphysiq' 63 74 character(len=80) :: abort_message … … 71 82 72 83 ! global array, on full physics grid: 73 real,allocatable :: latfi(:) 74 real,allocatable :: lonfi(:) 75 real,allocatable :: cufi(:) 76 real,allocatable :: cvfi(:) 77 real,allocatable :: airefi(:) 84 real,allocatable :: latfi_glo(:) 85 real,allocatable :: lonfi_glo(:) 86 real,allocatable :: cufi_glo(:) 87 real,allocatable :: cvfi_glo(:) 88 real,allocatable :: airefi_glo(:) 89 real,allocatable :: boundslonfi_glo(:,:) 90 real,allocatable :: boundslatfi_glo(:,:) 91 92 ! 
local arrays, on given MPI/OpenMP domain: 93 real,allocatable,save :: latfi(:) 94 real,allocatable,save :: lonfi(:) 95 real,allocatable,save :: cufi(:) 96 real,allocatable,save :: cvfi(:) 97 real,allocatable,save :: airefi(:) 98 real,allocatable,save :: boundslonfi(:,:) 99 real,allocatable,save :: boundslatfi(:,:) 100 !$OMP THREADPRIVATE (latfi,lonfi,cufi,cvfi,airefi,boundslonfi,boundslatfi) 78 101 79 102 pi=2.*asin(1.0) 80 103 81 IF (nlayer.NE.klev) THEN 82 write(*,*) 'STOP in ',trim(modname) 83 write(*,*) 'Problem with dimensions :' 84 write(*,*) 'nlayer = ',nlayer 85 write(*,*) 'klev = ',klev 86 abort_message = '' 87 CALL abort_gcm (modname,abort_message,1) 88 ENDIF 89 90 !IF (ngrid.NE.klon_glo) THEN 91 ! write(*,*) 'STOP in ',trim(modname) 92 ! write(*,*) 'Problem with dimensions :' 93 ! write(*,*) 'ngrid = ',ngrid 94 ! write(*,*) 'klon = ',klon_glo 95 ! abort_message = '' 96 ! CALL abort_gcm (modname,abort_message,1) 97 !ENDIF 98 99 !call init_phys_lmdz(iim,jjm+1,llm,1,(/(jjm-1)*iim+2/)) 104 ! Initialize Physics distibution and parameters and interface with dynamics 105 CALL init_physics_distribution(regular_lonlat,4, & 106 nbp,ii,jj+1,nlayer,communicator) 107 CALL init_interface_dyn_phys 100 108 101 109 ! init regular global longitude-latitude grid points and boundaries … … 122 130 123 131 ! Generate global arrays on full physics grid 124 allocate(latfi(klon_glo),lonfi(klon_glo),cufi(klon_glo),cvfi(klon_glo)) 125 latfi(1)=rlatudyn(1) 126 lonfi(1)=0. 127 cufi(1) = cudyn(1) 128 cvfi(1) = cvdyn(1) 132 allocate(latfi_glo(klon_glo),lonfi_glo(klon_glo)) 133 allocate(cufi_glo(klon_glo),cvfi_glo(klon_glo)) 134 allocate(airefi_glo(klon_glo)) 135 allocate(boundslonfi_glo(klon_glo,4)) 136 allocate(boundslatfi_glo(klon_glo,4)) 137 138 ! North pole 139 latfi_glo(1)=rlatudyn(1) 140 lonfi_glo(1)=0. 141 cufi_glo(1) = cudyn(1) 142 cvfi_glo(1) = cvdyn(1) 143 boundslonfi_glo(1,north_east)=0 144 boundslatfi_glo(1,north_east)=PI/2 145 boundslonfi_glo(1,north_west)=2*PI 146 boundslatfi_glo(1,north_west)=PI/2 147 boundslonfi_glo(1,south_west)=2*PI 148 boundslatfi_glo(1,south_west)=rlatvdyn(1) 149 boundslonfi_glo(1,south_east)=0 150 boundslatfi_glo(1,south_east)=rlatvdyn(1) 129 151 DO j=2,jj 130 152 DO i=1,ii 131 latfi((j-2)*ii+1+i)= rlatudyn(j) 132 lonfi((j-2)*ii+1+i)= rlonvdyn(i) 133 cufi((j-2)*ii+1+i) = cudyn((j-1)*(ii+1)+i) 134 cvfi((j-2)*ii+1+i) = cvdyn((j-1)*(ii+1)+i) 153 k=(j-2)*ii+1+i 154 latfi_glo((j-2)*ii+1+i)= rlatudyn(j) 155 lonfi_glo((j-2)*ii+1+i)= rlonvdyn(i) 156 cufi_glo((j-2)*ii+1+i) = cudyn((j-1)*(ii+1)+i) 157 cvfi_glo((j-2)*ii+1+i) = cvdyn((j-1)*(ii+1)+i) 158 boundslonfi_glo(k,north_east)=rlonudyn(i) 159 boundslatfi_glo(k,north_east)=rlatvdyn(j-1) 160 boundslonfi_glo(k,north_west)=rlonudyn(i+1) 161 boundslatfi_glo(k,north_west)=rlatvdyn(j-1) 162 boundslonfi_glo(k,south_west)=rlonudyn(i+1) 163 boundslatfi_glo(k,south_west)=rlatvdyn(j) 164 boundslonfi_glo(k,south_east)=rlonudyn(i) 165 boundslatfi_glo(k,south_east)=rlatvdyn(j) 135 166 ENDDO 136 167 ENDDO 137 latfi(klon_glo)= rlatudyn(jj+1) 138 lonfi(klon_glo)= 0. 139 cufi(klon_glo) = cudyn((ii+1)*jj+1) 140 cvfi(klon_glo) = cvdyn((ii+1)*jj-ii) 168 ! South pole 169 latfi_glo(klon_glo)= rlatudyn(jj+1) 170 lonfi_glo(klon_glo)= 0. 
171 cufi_glo(klon_glo) = cudyn((ii+1)*jj+1) 172 cvfi_glo(klon_glo) = cvdyn((ii+1)*jj-ii) 173 boundslonfi_glo(klon_glo,north_east)= 0 174 boundslatfi_glo(klon_glo,north_east)= rlatvdyn(jj) 175 boundslonfi_glo(klon_glo,north_west)= 2*PI 176 boundslatfi_glo(klon_glo,north_west)= rlatvdyn(jj) 177 boundslonfi_glo(klon_glo,south_west)= 2*PI 178 boundslatfi_glo(klon_glo,south_west)= -PI/2 179 boundslonfi_glo(klon_glo,south_east)= 0 180 boundslatfi_glo(klon_glo,south_east)= -Pi/2 141 181 142 182 ! build airefi(), mesh area on physics grid 143 allocate(airefi(klon_glo)) 144 CALL gr_dyn_fi(1,ii+1,jj+1,klon_glo,airedyn,airefi) 183 CALL gr_dyn_fi(1,ii+1,jj+1,klon_glo,airedyn,airefi_glo) 145 184 ! Poles are single points on physics grid 146 airefi (1)=sum(airedyn(1:ii,1))147 airefi (klon_glo)=sum(airedyn(1:ii,jj+1))185 airefi_glo(1)=sum(airedyn(1:ii,1)) 186 airefi_glo(klon_glo)=sum(airedyn(1:ii,jj+1)) 148 187 149 188 ! Sanity check: do total planet area match between physics and dynamics? 150 189 total_area_dyn=sum(airedyn(1:ii,1:jj+1)) 151 total_area_phy=sum(airefi (1:klon_glo))190 total_area_phy=sum(airefi_glo(1:klon_glo)) 152 191 IF (total_area_dyn/=total_area_phy) THEN 153 192 WRITE (lunout, *) 'iniphysiq: planet total surface discrepancy !!!' … … 164 203 !$OMP PARALLEL 165 204 ! Now generate local lon/lat/cu/cv/area arrays 166 call initcomgeomphy 167 168 !!!!$OMP PARALLEL PRIVATE(ibegin,iend) & 169 !!! !$OMP SHARED(airefi,cufi,cvfi,lonfi,latfi) 205 allocate(latfi(klon_omp),lonfi(klon_omp),cufi(klon_omp),cvfi(klon_omp)) 206 allocate(airefi(klon_omp)) 207 allocate(boundslonfi(klon_omp,4)) 208 allocate(boundslatfi(klon_omp,4)) 209 !call initcomgeomphy 170 210 171 211 offset=klon_mpi_begin-1 172 cell_area(1:klon_omp)=airefi(offset+klon_omp_begin:offset+klon_omp_end) 173 dx(1:klon_omp)=cufi(offset+klon_omp_begin:offset+klon_omp_end) 174 dy(1:klon_omp)=cvfi(offset+klon_omp_begin:offset+klon_omp_end) 175 longitude(1:klon_omp)=lonfi(offset+klon_omp_begin:offset+klon_omp_end) 176 latitude(1:klon_omp)=latfi(offset+klon_omp_begin:offset+klon_omp_end) 212 airefi(1:klon_omp)=airefi_glo(offset+klon_omp_begin:offset+klon_omp_end) 213 cufi(1:klon_omp)=cufi_glo(offset+klon_omp_begin:offset+klon_omp_end) 214 cvfi(1:klon_omp)=cvfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 215 lonfi(1:klon_omp)=lonfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 216 latfi(1:klon_omp)=latfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 217 boundslonfi(1:klon_omp,:)=boundslonfi_glo(offset+klon_omp_begin:offset+klon_omp_end,:) 218 boundslatfi(1:klon_omp,:)=boundslatfi_glo(offset+klon_omp_begin:offset+klon_omp_end,:) 219 220 ! copy over local grid longitudes and latitudes 221 CALL init_geometry(klon_omp,lonfi,latfi,boundslonfi,boundslatfi, & 222 airefi,cufi,cvfi) 223 224 call init_dimphy(klon_omp,nlayer) ! Initialize dimphy module 177 225 178 226 ! copy over preff , ap() and bp() -
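For readers following the new grid setup above, here is a minimal stand-alone sketch (illustration only, not code from the changeset; the program name and the 64x48 grid size are assumptions) of the dynamics-to-physics column mapping that iniphysiq builds before handing the local arrays to init_geometry: each pole collapses to a single physics column, and an interior dynamics cell (i,j) maps to column k=(j-2)*ii+1+i.

  program column_index_sketch
    implicit none
    ! hypothetical grid: ii=64 longitudes, jj+1=49 latitude rows on the dynamics grid
    integer, parameter :: ii = 64, jj = 48
    integer :: i, j, k, klon_glo

    ! one column per pole plus ii columns for each of the jj-1 interior latitude rows
    klon_glo = (jj-1)*ii + 2
    print *, 'klon_glo =', klon_glo      ! 3010 for this grid

    ! first cell of the first non-polar latitude row
    i = 1
    j = 2
    k = (j-2)*ii + 1 + i                 ! same index formula as in the loops above
    print *, 'physics column k =', k     ! 2, since column 1 is the north pole
  end program column_index_sketch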
trunk/LMDZ.GENERIC/libf/dynphy_lonlat/phystd/newstart.F
r1524 r1543 15 15 c======================================================================= 16 16 17 use mod_phys_lmdz_para, only: is_parallel, is_sequential, 18 & is_mpi_root, is_omp_root, 19 & is_master 17 20 use infotrac, only: infotrac_init, nqtot, tname 18 21 USE tracer_h, ONLY: igcm_co2_ice, igcm_h2o_vap, igcm_h2o_ice … … 20 23 USE surfdat_h, ONLY: phisfi, albedodat, 21 24 & zmea, zstd, zsig, zgam, zthe 22 USE comgeomfi_h, ONLY: lati, long, area23 25 use datafile_mod, only: datadir, surfdir 24 26 use ioipsl_getin_p_mod, only: getin_p … … 26 28 use phyredem, only: physdem0, physdem1 27 29 use iostart, only: open_startphy 28 use comgeomphy, only: initcomgeomphy29 30 use slab_ice_h, only:noceanmx 30 31 use filtreg_mod, only: inifilr 32 USE mod_const_mpi, ONLY: COMM_LMDZ 31 33 USE comvert_mod, ONLY: ap,bp,aps,bps,pa,preff 32 34 USE comconst_mod, ONLY: lllm,daysec,dtvr,dtphys,cpp,kappa, 33 . 35 . rad,omeg,g,r,pi 34 36 USE serre_mod, ONLY: alphax 35 37 USE temps_mod, ONLY: day_ini 36 38 USE ener_mod, ONLY: etot0,ptot0,ztot0,stot0,ang0 37 use ini fis_mod, only: inifis39 use iniphysiq_mod, only: iniphysiq 38 40 implicit none 39 41 40 #include "dimensions.h" 41 !#include "dimphys.h" 42 include "dimensions.h" 42 43 integer, parameter :: ngridmx = (2+(jjm-1)*iim - 1/jjm) 43 !#include "planete.h" 44 #include "paramet.h" 45 #include "comgeom2.h" 46 !#include "control.h" 47 #include "comdissnew.h" 48 #include "netcdf.inc" 49 !#include "advtrac.h" 44 include "paramet.h" 45 include "comgeom2.h" 46 include "comdissnew.h" 47 include "netcdf.inc" 48 50 49 c======================================================================= 51 50 c Declarations … … 54 53 c Variables dimension du fichier "start_archive" 55 54 c------------------------------------ 56 CHARACTER 55 CHARACTER relief*3 57 56 58 57 … … 94 93 c variable physique 95 94 c------------------ 96 REAL tsurf(ngridmx) 97 REAL tsoil(ngridmx,nsoilmx) ! soil temperature98 ! REAL co2ice(ngridmx) 99 REAL emis(ngridmx) 95 REAL tsurf(ngridmx) ! surface temperature 96 REAL,ALLOCATABLE :: tsoil(:,:) ! soil temperature 97 ! REAL co2ice(ngridmx) ! CO2 ice layer 98 REAL emis(ngridmx) ! surface emissivity 100 99 real emisread ! added by RW 101 100 REAL,ALLOCATABLE :: qsurf(:,:) … … 103 102 ! REAL rnaturfi(ngridmx) 104 103 real alb(iip1,jjp1),albfi(ngridmx) ! albedos 105 real ith(iip1,jjp1,nsoilmx),ithfi(ngridmx,nsoilmx) ! thermal inertia (3D)104 real,ALLOCATABLE :: ith(:,:,:),ithfi(:,:) ! thermal inertia (3D) 106 105 real surfith(iip1,jjp1),surfithfi(ngridmx) ! surface thermal inertia (2D) 107 106 REAL latfi(ngridmx),lonfi(ngridmx),airefi(ngridmx) … … 113 112 114 113 REAL rnat(ngridmx) 115 REAL tslab(ngridmx,nsoilmx) ! slab ocean temperature114 REAL,ALLOCATABLE :: tslab(:,:) ! slab ocean temperature 116 115 REAL pctsrf_sic(ngridmx) ! sea ice cover 117 116 REAL tsea_ice(ngridmx) ! temperature sea_ice … … 145 144 character*80 fichnom 146 145 character*250 filestring 147 integer Lmodif,iq ,thermo146 integer Lmodif,iq 148 147 character modif*20 149 148 real z_reel(iip1,jjp1) … … 155 154 real val, val2, val3, val4 ! to store temporary variables 156 155 real :: iceith=2000 ! thermal inertia of subterranean ice 157 integer iref,jref158 156 159 157 INTEGER :: itau 160 158 161 INTEGER :: nq,numvanle162 159 character(len=20) :: txt ! to store some text 163 160 character(len=50) :: surfacefile ! "surface.nc" (or equivalent file) … … 166 163 real :: profile(llm+1) ! to store an atmospheric profile + surface value 167 164 168 ! 
added by RW for test169 real pmean, phi0170 171 165 ! added by BC for equilibrium temperature startup 172 166 real teque … … 193 187 ! make deliberate choice of these values elsewhere. 194 188 189 planet_type="generic" 190 195 191 ! initialize "serial/parallel" related stuff 196 CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/)) 197 call initcomgeomphy 192 ! (required because we call tabfi() below, before calling iniphysiq) 193 is_sequential=.true. 194 is_parallel=.false. 195 is_mpi_root=.true. 196 is_omp_root=.true. 197 is_master=.true. 198 198 199 ! Load tracer number and names: 199 ! call iniadvtrac(nqtot,numvanle)200 200 call infotrac_init 201 201 ! allocate arrays … … 203 203 allocate(qsurf(ngridmx,nqtot)) 204 204 205 planet_type="generic" 206 205 ! get value of nsoilmx and allocate corresponding arrays 206 nsoilmx=18 ! default value 207 call getin_p("nsoilmx",nsoilmx) 208 209 allocate(tsoil(ngridmx,nsoilmx)) 210 allocate(ith(iip1,jjp1,nsoilmx),ithfi(ngridmx,nsoilmx)) 211 allocate(tslab(ngridmx,nsoilmx)) 212 207 213 c======================================================================= 208 214 c Choice of the start file(s) to use … … 284 290 IF (.not. ALLOCATED(zgam)) ALLOCATE(zgam(ngridmx)) 285 291 IF (.not. ALLOCATED(zthe)) ALLOCATE(zthe(ngridmx)) 286 ! ALLOCATE ARRAYS in comsoil_h (if not already done)287 IF (.not.ALLOCATED(layer))288 . ALLOCATE(layer(nsoilmx))289 IF (.not.ALLOCATED(mlayer))290 . ALLOCATE(mlayer(0:nsoilmx-1))291 IF (.not.ALLOCATED(inertiedat))292 . ALLOCATE(inertiedat(ngridmx,nsoilmx))293 ! ALLOCATE ARRAYS IN comgeomfi_h (done in inifis usually)294 IF (.not. ALLOCATED(lati)) ALLOCATE(lati(ngridmx))295 IF (.not. ALLOCATED(long)) ALLOCATE(long(ngridmx))296 IF (.not. ALLOCATED(area)) ALLOCATE(area(ngridmx))297 292 298 293 c----------------------------------------------------------------------- … … 337 332 write(*,*) i,tab_cntrl(i) 338 333 enddo 339 334 340 335 ! Lmodif set to 0 to disable modifications possibility in phyeta0 341 336 write(*,*) 'Reading file START' … … 355 350 do i=1,ngridmx 356 351 albfi(i) = albedodat(i) 357 352 do j=1,nsoilmx 358 353 ithfi(i,j) = inertiedat(i,j) 359 354 enddo 360 355 ! build a surfithfi(:) using 1st layer of ithfi(:), which might 361 356 ! be needed later on if reinitializing soil thermal inertia … … 371 366 endif 372 367 c----------------------------------------------------------------------- 373 c 368 c Initialisation des constantes dynamique 374 369 c----------------------------------------------------------------------- 375 370 … … 451 446 idum=0 452 447 453 c Initialisation coordonnees /aires 454 c ------------------------------- 455 ! Note: rlatu(:) and rlonv(:) are commons defined in "comgeom.h" 456 ! rlatu() and rlonv() are given in radians 457 latfi(1)=rlatu(1) 458 lonfi(1)=0. 459 DO j=2,jjm 460 DO i=1,iim 461 latfi((j-2)*iim+1+i)=rlatu(j) 462 lonfi((j-2)*iim+1+i)=rlonv(i) 463 ENDDO 464 ENDDO 465 latfi(ngridmx)=rlatu(jjp1) 466 lonfi(ngridmx)=0. 467 468 ! build airefi(), mesh area on physics grid 469 CALL gr_dyn_fi(1,iip1,jjp1,ngridmx,aire,airefi) 470 ! Poles are single points on physics grid 471 airefi(1)=sum(aire(1:iim,1)) 472 airefi(ngridmx)=sum(aire(1:iim,jjm+1)) 473 474 ! also initialize various physics flags/settings which might be needed 475 ! (for instance initracer needs to know about some flags, and/or 476 ! 'datafile' path may be changed by user) 477 call inifis(ngridmx,llm,nqtot,day_ini,daysec,dtphys, 478 & latfi,lonfi,airefi,rad,g,r,cpp) 448 ! 
Initialize the physics 449 CALL iniphysiq(iim,jjm,llm, 450 & (jjm-1)*iim+2,comm_lmdz, 451 & daysec,day_ini,dtphys, 452 & rlatu,rlatv,rlonu,rlonv, 453 & aire,cu,cv,rad,g,r,cpp, 454 & 1) 479 455 480 456 c======================================================================= … … 560 536 & rnat,pctsrf_sic,tslab,tsea_ice,sea_ice) 561 537 write(*,*) "OK, read start_archive file" 562 563 564 538 ! copy soil thermal inertia 539 ithfi(:,:)=inertiedat(:,:) 540 565 541 ierr= NF_CLOSE(nid) 566 542 … … 727 703 DO j=1,jjp1 728 704 DO i=1,iip1 729 alb(i,j) = alb_bb 730 731 ith(i,j,isoil) = ith_bb 732 705 alb(i,j) = alb_bb ! albedo 706 do isoil=1,nsoilmx 707 ith(i,j,isoil) = ith_bb ! thermal inertia 708 enddo 733 709 END DO 734 710 END DO … … 867 843 if (yes.eq.'y') then 868 844 write(*,*) 'Value?' 869 845 read(*,*,iostat=ierr) psea 870 846 DO i=1,iip1 871 847 DO j=1,jjp1 … … 874 850 ENDDO 875 851 ENDDO 876 852 write(*,*) 'psea=',psea 877 853 else 878 854 write(*,*) 'no' … … 1467 1443 ! ---------------------------------------------------------------------- 1468 1444 1469 1445 else if (trim(modif) .eq. 'therm_ini_s') then 1470 1446 ! write(*,*)"surfithfi(1):",surfithfi(1) 1471 1472 1473 1447 do isoil=1,nsoilmx 1448 inertiedat(1:ngridmx,isoil)=surfithfi(1:ngridmx) 1449 enddo 1474 1450 write(*,*)'OK: Soil thermal inertia has been reset to referenc 1475 1451 &e surface values' 1476 ! 1477 1478 1479 1452 ! write(*,*)"inertiedat(1,1):",inertiedat(1,1) 1453 ithfi(:,:)=inertiedat(:,:) 1454 ! recast ithfi() onto ith() 1455 call gr_fi_dyn(nsoilmx,ngridmx,iip1,jjp1,ithfi,ith) 1480 1456 ! Check: 1481 1457 ! do i=1,iip1 … … 1485 1461 ! enddo 1486 1462 ! enddo 1487 ! 1463 ! enddo 1488 1464 1489 1465 … … 1660 1636 1661 1637 c======================================================================= 1662 c 1638 c Formats 1663 1639 c======================================================================= 1664 1640 -
trunk/LMDZ.GENERIC/libf/dynphy_lonlat/phystd/start2archive.F
r1478 r1543 21 21 use infotrac, only: infotrac_init, nqtot, tname 22 22 USE comsoil_h 23 USE comgeomfi_h, ONLY: lati, long, area 23 24 ! USE comgeomfi_h, ONLY: lati, long, area 24 25 ! use control_mod 25 use comgeomphy, only: initcomgeomphy26 ! use comgeomphy, only: initcomgeomphy 26 27 use slab_ice_h, only: noceanmx 27 28 ! to use 'getin' 28 29 USE ioipsl_getincom 29 30 USE planete_mod, only: year_day 31 USE mod_const_mpi, ONLY: COMM_LMDZ 30 32 USE control_mod, only: planet_type 31 33 USE callkeys_mod, ONLY: ok_slab_ocean 32 34 use filtreg_mod, only: inifilr 33 35 USE comvert_mod, ONLY: ap,bp 34 USE comconst_mod, ONLY: cpp,g36 USE comconst_mod, ONLY: daysec,dtphys,rad,g,r,cpp 35 37 USE logic_mod, ONLY: grireg 36 38 USE temps_mod, ONLY: day_ini 39 USE iniphysiq_mod, ONLY: iniphysiq 37 40 implicit none 38 41 39 #include "dimensions.h"42 include "dimensions.h" 40 43 integer, parameter :: ngridmx = (2+(jjm-1)*iim - 1/jjm) 41 #include "paramet.h"42 #include "comdissip.h"43 #include "comgeom.h"44 include "paramet.h" 45 include "comdissip.h" 46 include "comgeom.h" 44 47 !#include "control.h" 45 48 … … 47 50 !#include "planete.h" 48 51 !#include"advtrac.h" 49 #include "netcdf.inc"52 include "netcdf.inc" 50 53 c----------------------------------------------------------------------- 51 54 c Declarations … … 68 71 c Variable Physiques (grille physique) 69 72 c ------------------------------------ 70 REAL tsurf(ngridmx) 71 REAL tsoil(ngridmx,nsoilmx) ! Soil temperature72 REAL co2ice(ngridmx) 73 REAL tsurf(ngridmx) ! Surface temperature 74 REAL,ALLOCATABLE :: tsoil(:,:) ! Soil temperature 75 REAL co2ice(ngridmx) ! CO2 ice layer 73 76 REAL q2(ngridmx,llm+1) 74 77 REAL,ALLOCATABLE :: qsurf(:,:) … … 93 96 REAL T(ip1jmp1,llm),us(ip1jmp1,llm),vs(ip1jmp1,llm) 94 97 REAL tsurfS(ip1jmp1) 95 REAL tsoilS(ip1jmp1,nsoilmx)96 REAL ithS(ip1jmp1,nsoilmx) ! Soil Thermal Inertia98 REAL,ALLOCATABLE :: tsoilS(:,:) 99 REAL,ALLOCATABLE :: ithS(:,:) ! Soil Thermal Inertia 97 100 REAL co2iceS(ip1jmp1) 98 101 REAL q2S(ip1jmp1,llm+1) … … 144 147 grireg = .TRUE. 145 148 146 ! initialize "serial/parallel" related stuff147 CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/))148 call initcomgeomphy149 150 ! ALLOCATE ARRAYS IN comgeomfi_h (usually done in inifis)151 ! this must be here for start2archive to work152 IF (.not. ALLOCATED(lati)) ALLOCATE(lati(ngridmx))153 IF (.not. ALLOCATED(long)) ALLOCATE(long(ngridmx))154 IF (.not. ALLOCATED(area)) ALLOCATE(area(ngridmx))155 156 149 planet_type="generic" 157 150 … … 160 153 c======================================================================= 161 154 ! Load tracer number and names: 162 ! call iniadvtrac(nqtot,numvanle)163 155 call infotrac_init 164 156 … … 168 160 allocate(qsurfS(ip1jmp1,nqtot)) 169 161 ! other array allocations: 170 call ini_comsoil_h(ngridmx) 162 ! call ini_comsoil_h(ngridmx) ! done via iniphysiq 171 163 172 164 fichnom = 'start.nc' … … 198 190 199 191 ierr = NF_CLOSE(nid1) 192 193 ! Get value of the "subsurface_layers" dimension from physics start file 194 fichnom = 'startfi.nc' 195 ierr = NF_OPEN (fichnom, NF_NOWRITE,nid1) 196 IF (ierr.NE.NF_NOERR) THEN 197 write(6,*)' Pb d''ouverture du fichier'//trim(fichnom) 198 CALL ABORT 199 ENDIF 200 ierr = NF_INQ_DIMID(nid1,"subsurface_layers",varid) 201 IF (ierr .NE. NF_NOERR) THEN 202 PRINT*, "start2archive: No subsurface_layers dimension!!" 203 CALL abort 204 ENDIF 205 ierr = NF_INQ_DIMLEN(nid1,varid,nsoilmx) 206 IF (ierr .NE. NF_NOERR) THEN 207 PRINT*, "start2archive: Failed reading subsurface_layers value!!" 
208 CALL abort 209 ENDIF 210 ierr = NF_CLOSE(nid1) 200 211 212 ! allocate arrays of nsoilmx size 213 allocate(tsoil(ngridmx,nsoilmx)) 214 allocate(tsoilS(ip1jmp1,nsoilmx)) 215 allocate(ithS(ip1jmp1,nsoilmx)) 216 217 c----------------------------------------------------------------------- 218 c Initialisations 219 c----------------------------------------------------------------------- 220 221 CALL defrun_new(99, .FALSE. ) 222 call iniconst 223 call inigeom 224 call inifilr 225 226 ! Initialize the physics 227 CALL iniphysiq(iim,jjm,llm, 228 & (jjm-1)*iim+2,comm_lmdz, 229 & daysec,day_ini,dtphys, 230 & rlatu,rlatv,rlonu,rlonv, 231 & aire,cu,cv,rad,g,r,cpp, 232 & 1) 201 233 202 234 fichnom = 'startfi.nc' … … 256 288 c ***************************************************************** 257 289 258 c-----------------------------------------------------------------------259 c Initialisations260 c-----------------------------------------------------------------------261 262 CALL defrun_new(99, .FALSE. )263 call iniconst264 call inigeom265 call inifilr266 290 CALL pression(ip1jmp1, ap, bp, ps, p3d) 267 291 call exner_hyb(ip1jmp1, ps, p3d, beta, pks, pk, pkf) -
trunk/LMDZ.GENERIC/libf/phy_common/ioipsl_getin_p_mod.F90
r1521 r1543 12 12 USE mod_phys_lmdz_mpi_data, ONLY : is_mpi_root 13 13 USE mod_phys_lmdz_omp_data, ONLY : is_omp_root 14 use mod_phys_lmdz_para, only: bcast 14 USE mod_phys_lmdz_transfert_para, ONLY : bcast 15 15 !- 16 16 IMPLICIT NONE -
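The calling convention of getin_p is unchanged by the edit above; only the provenance of bcast moves to mod_phys_lmdz_transfert_para. As a reminder, a minimal usage sketch (illustration only; the program name is invented and "nsoilmx" is simply reused from the newstart.F hunk above):

  program getin_p_sketch
    use ioipsl_getin_p_mod, only: getin_p
    implicit none
    integer :: nsoilmx
    nsoilmx = 18                       ! default, kept unless overridden in the .def files
    call getin_p("nsoilmx", nsoilmx)   ! master reads the value, which is then broadcast to all
    print *, 'nsoilmx =', nsoilmx
  end program getin_p_sketch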
trunk/LMDZ.GENERIC/libf/phy_common/mod_grid_phy_lmdz.F90
r1521 r1543 1 1 ! 2 !$ Header$2 !$Id $ 3 3 ! 4 4 MODULE mod_grid_phy_lmdz … … 7 7 PRIVATE :: grid1dTo2d_glo_igen, grid1dTo2d_glo_rgen, grid1dTo2d_glo_lgen, & 8 8 grid2dTo1d_glo_igen, grid2dTo1d_glo_rgen, grid2dTo1d_glo_lgen 9 9 10 INTEGER,PARAMETER :: unstructured=0 11 INTEGER,PARAMETER :: regular_lonlat=1 12 13 INTEGER,SAVE :: grid_type 14 INTEGER,SAVE :: nvertex 10 15 INTEGER,SAVE :: nbp_lon ! == iim 11 INTEGER,SAVE :: nbp_lat ! == jjmp1 16 INTEGER,SAVE :: nbp_lat ! == jjmp1 (or == 1 if running 1D model) 12 17 INTEGER,SAVE :: nbp_lev ! == llm 13 INTEGER,SAVE :: klon_glo 14 !$OMP THREADPRIVATE(nbp_lon,nbp_lat,nbp_lev,klon_glo) 18 INTEGER,SAVE :: klon_glo ! total number of atmospheric columns 15 19 16 20 INTERFACE grid1dTo2d_glo … … 33 37 34 38 35 SUBROUTINE init_grid_phy_lmdz( iim,jjp1,llm)39 SUBROUTINE init_grid_phy_lmdz(grid_type_,nvertex_,nbp_lon_,nbp_lat_,nbp_lev_) 36 40 IMPLICIT NONE 37 INTEGER, INTENT(in) :: iim 38 INTEGER, INTENT(in) :: jjp1 39 INTEGER, INTENT(in) :: llm 40 41 nbp_lon=iim 42 nbp_lat=jjp1 43 nbp_lev=llm 44 klon_glo=(iim*jjp1)-2*(iim-1) 45 46 ! Ehouarn: handle 1D case: 47 if ((iim.eq.1).and.(jjp1.eq.2)) then 48 nbp_lat=1 49 klon_glo=1 50 endif 41 INTEGER,INTENT(IN) :: grid_type_ 42 INTEGER,INTENT(IN) :: nvertex_ 43 INTEGER, INTENT(IN) :: nbp_lon_ 44 INTEGER, INTENT(IN) :: nbp_lat_ 45 INTEGER, INTENT(IN) :: nbp_lev_ 46 47 grid_type = grid_type_ 48 nvertex = nvertex_ 49 nbp_lon = nbp_lon_ 50 nbp_lat = nbp_lat_ 51 nbp_lev = nbp_lev_ 52 53 IF (nbp_lon*nbp_lat==1) THEN 54 klon_glo=1 55 ELSE 56 klon_glo=(nbp_lon*nbp_lat)-2*(nbp_lon-1) 57 ENDIF 51 58 52 59 END SUBROUTINE init_grid_phy_lmdz … … 284 291 285 292 !---------------------------------------------------------------- 286 ! fonctions generiques (privees)293 ! Generic (private) fonctions 287 294 !---------------------------------------------------------------- 295 288 296 SUBROUTINE grid1dTo2d_glo_igen(VarIn,VarOut,dimsize) 297 289 298 IMPLICIT NONE 290 299 … … 321 330 322 331 SUBROUTINE grid1dTo2d_glo_rgen(VarIn,VarOut,dimsize) 332 323 333 IMPLICIT NONE 324 334 … … 354 364 355 365 SUBROUTINE grid1dTo2d_glo_lgen(VarIn,VarOut,dimsize) 366 356 367 IMPLICIT NONE 357 368 … … 387 398 388 399 SUBROUTINE grid2dTo1d_glo_igen(VarIn,VarOut,dimsize) 400 389 401 IMPLICIT NONE 390 402 … … 409 421 410 422 SUBROUTINE grid2dTo1d_glo_rgen(VarIn,VarOut,dimsize) 423 411 424 IMPLICIT NONE 412 425 … … 431 444 432 445 SUBROUTINE grid2dTo1d_glo_lgen(VarIn,VarOut,dimsize) 446 433 447 IMPLICIT NONE 434 448 -
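Note that the new generic formula is consistent with the hard-wired column count the callers pass in: with nbp_lon = iim and nbp_lat = jjm+1, nbp_lon*nbp_lat - 2*(nbp_lon-1) = iim*(jjm+1) - 2*iim + 2 = (jjm-1)*iim + 2, which is exactly the nbp expression used in the newstart.F, start2archive.F and LMDZ.MARS gcm.F calls of this changeset, while the explicit 1D branch (nbp_lon*nbp_lat == 1) yields klon_glo = 1 as before.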
trunk/LMDZ.GENERIC/libf/phy_common/mod_phys_lmdz_mpi_data.F90
r1521 r1543 3 3 ! 4 4 MODULE mod_phys_lmdz_mpi_data 5 USE mod_const_mpi, only: MPI_REAL_LMDZ, COMM_LMDZ 5 ! USE mod_const_mpi 6 6 7 7 INTEGER,SAVE :: ii_begin … … 16 16 INTEGER,SAVE :: klon_mpi_end 17 17 INTEGER,SAVE :: klon_mpi 18 !!$OMP THREADPRIVATE(ii_begin,ii_end,jj_begin,jj_end,jj_nb,ij_begin,&19 ! !$OMP ij_end,ij_nb,klon_mpi_begin,klon_mpi_end,klon_mpi)20 18 21 19 INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: jj_para_nb … … 33 31 INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: klon_mpi_para_begin 34 32 INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: klon_mpi_para_end 35 !!$OMP THREADPRIVATE(jj_para_nb,jj_para_begin,jj_para_end,ii_para_begin,ii_para_end,&36 ! !$OMP ij_para_nb,ij_para_begin,ij_para_end,klon_mpi_para_nb,klon_mpi_para_begin,&37 ! !$OMP klon_mpi_para_end)38 33 39 34 40 35 INTEGER,SAVE :: mpi_rank 41 36 INTEGER,SAVE :: mpi_size 42 INTEGER,SAVE :: mpi_root 37 INTEGER,SAVE :: mpi_master 38 ! INTEGER,SAVE :: mpi_root 43 39 LOGICAL,SAVE :: is_mpi_root 44 40 LOGICAL,SAVE :: is_using_mpi 45 !!$OMP THREADPRIVATE(mpi_rank,mpi_size,mpi_root,is_mpi_root,is_using_mpi)46 41 47 42 … … 49 44 LOGICAL,SAVE :: is_south_pole 50 45 INTEGER,SAVE :: COMM_LMDZ_PHY 51 !!$OMP THREADPRIVATE(is_north_pole,is_south_pole,COMM_LMDZ_PHY) 46 INTEGER,SAVE :: MPI_REAL_LMDZ ! MPI_REAL8 52 47 53 48 CONTAINS 54 49 55 SUBROUTINE Init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib) 56 USE mod_const_mpi, ONLY : COMM_LMDZ 50 ! SUBROUTINE Init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib) 51 SUBROUTINE init_phys_lmdz_mpi_data(nbp, nbp_lon, nbp_lat, communicator) 52 ! USE mod_const_mpi, ONLY : COMM_LMDZ 57 53 IMPLICIT NONE 58 INTEGER,INTENT(in) :: iim 59 INTEGER,INTENT(in) :: jjp1 60 INTEGER,INTENT(in) :: nb_proc 61 INTEGER,INTENT(in) :: distrib(0:nb_proc-1) 62 54 #ifdef CPP_MPI 55 INCLUDE 'mpif.h' 56 #endif 57 INTEGER,INTENT(in) :: nbp 58 INTEGER,INTENT(in) :: nbp_lon 59 INTEGER,INTENT(in) :: nbp_lat 60 INTEGER,INTENT(in) :: communicator 61 62 INTEGER,ALLOCATABLE :: distrib(:) 63 63 INTEGER :: ierr 64 64 INTEGER :: klon_glo … … 71 71 #endif 72 72 73 if ( iim.eq.1) then73 if ((nbp_lon.eq.1).and.(nbp_lat.eq.1)) then ! running 1D column model 74 74 klon_glo=1 75 75 else 76 klon_glo=iim*(jjp1-2)+2 76 ! The usual global physics grid: 1 point for each pole and nbp_lon points 77 ! for all other latitudes 78 klon_glo=nbp_lon*(nbp_lat-2)+2 77 79 endif 78 80 79 COMM_LMDZ_PHY= COMM_LMDZ81 COMM_LMDZ_PHY=communicator 80 82 81 83 IF (is_using_mpi) THEN 82 84 #ifdef CPP_MPI 85 MPI_REAL_LMDZ=MPI_REAL8 83 86 CALL MPI_COMM_SIZE(COMM_LMDZ_PHY,mpi_size,ierr) 84 87 CALL MPI_COMM_RANK(COMM_LMDZ_PHY,mpi_rank,ierr) … … 89 92 ENDIF 90 93 94 ALLOCATE(distrib(0:mpi_size-1)) 95 96 IF (is_using_mpi) THEN 97 #ifdef CPP_MPI 98 CALL MPI_ALLGATHER(nbp,1,MPI_INTEGER,distrib,1,MPI_INTEGER,COMM_LMDZ_PHY,ierr) 99 #endif 100 ELSE 101 distrib(:)=nbp 102 ENDIF 103 104 91 105 IF (mpi_rank == 0) THEN 92 mpi_ root= 0106 mpi_master = 0 93 107 is_mpi_root = .true. 
94 108 ENDIF … … 122 136 123 137 124 klon_mpi_para_nb(0:mpi_size-1)=distrib(0: nb_proc-1)138 klon_mpi_para_nb(0:mpi_size-1)=distrib(0:mpi_size-1) 125 139 126 140 DO i=0,mpi_size-1 … … 139 153 ij_para_begin(i) = 1 140 154 ELSE 141 ij_para_begin(i) = klon_mpi_para_begin(i)+ iim-1155 ij_para_begin(i) = klon_mpi_para_begin(i)+nbp_lon-1 142 156 ENDIF 143 157 144 jj_para_begin(i) = (ij_para_begin(i)-1)/ iim+ 1145 ii_para_begin(i) = MOD(ij_para_begin(i)-1, iim) + 1158 jj_para_begin(i) = (ij_para_begin(i)-1)/nbp_lon + 1 159 ii_para_begin(i) = MOD(ij_para_begin(i)-1,nbp_lon) + 1 146 160 147 161 148 ij_para_end(i) = klon_mpi_para_end(i)+iim-1 149 jj_para_end(i) = (ij_para_end(i)-1)/iim + 1 150 ii_para_end(i) = MOD(ij_para_end(i)-1,iim) + 1 151 152 ! Ehouarn: handle 1D case: 153 if (klon_glo.eq.1) then 154 klon_mpi_para_end(i) = 1 155 klon_mpi_para_nb(i) = 1 156 ij_para_end(i) = 1 157 jj_para_end(i) = 1 158 ii_para_end(i) = 1 159 endif 162 ij_para_end(i) = klon_mpi_para_end(i)+nbp_lon-1 163 jj_para_end(i) = (ij_para_end(i)-1)/nbp_lon + 1 164 ii_para_end(i) = MOD(ij_para_end(i)-1,nbp_lon) + 1 165 160 166 161 167 ij_para_nb(i) = ij_para_end(i)-ij_para_begin(i)+1 … … 181 187 182 188 SUBROUTINE print_module_data 189 ! USE print_control_mod, ONLY: lunout 183 190 IMPLICIT NONE 184 !INCLUDE "iniprint.h"185 186 WRITE( *,*) 'ii_begin =', ii_begin187 WRITE( *,*) 'ii_end =', ii_end188 WRITE( *,*) 'jj_begin =',jj_begin189 WRITE( *,*) 'jj_end =', jj_end190 WRITE( *,*) 'jj_nb =', jj_nb191 WRITE( *,*) 'ij_begin =', ij_begin192 WRITE( *,*) 'ij_end =', ij_end193 WRITE( *,*) 'ij_nb =', ij_nb194 WRITE( *,*) 'klon_mpi_begin =', klon_mpi_begin195 WRITE( *,*) 'klon_mpi_end =', klon_mpi_end196 WRITE( *,*) 'klon_mpi =', klon_mpi197 WRITE( *,*) 'jj_para_nb =', jj_para_nb198 WRITE( *,*) 'jj_para_begin =', jj_para_begin199 WRITE( *,*) 'jj_para_end =', jj_para_end200 WRITE( *,*) 'ii_para_begin =', ii_para_begin201 WRITE( *,*) 'ii_para_end =', ii_para_end202 WRITE( *,*) 'ij_para_nb =', ij_para_nb203 WRITE( *,*) 'ij_para_begin =', ij_para_begin204 WRITE( *,*) 'ij_para_end =', ij_para_end205 WRITE( *,*) 'klon_mpi_para_nb =', klon_mpi_para_nb206 WRITE( *,*) 'klon_mpi_para_begin =', klon_mpi_para_begin207 WRITE( *,*) 'klon_mpi_para_end =', klon_mpi_para_end208 WRITE( *,*) 'mpi_rank =', mpi_rank209 WRITE( *,*) 'mpi_size =', mpi_size210 WRITE( *,*) 'mpi_root =', mpi_root211 WRITE( *,*) 'is_mpi_root =', is_mpi_root212 WRITE( *,*) 'is_north_pole =', is_north_pole213 WRITE( *,*) 'is_south_pole =', is_south_pole214 WRITE( *,*) 'COMM_LMDZ_PHY =', COMM_LMDZ_PHY191 INCLUDE "iniprint.h" 192 193 WRITE(lunout,*) 'ii_begin =', ii_begin 194 WRITE(lunout,*) 'ii_end =', ii_end 195 WRITE(lunout,*) 'jj_begin =',jj_begin 196 WRITE(lunout,*) 'jj_end =', jj_end 197 WRITE(lunout,*) 'jj_nb =', jj_nb 198 WRITE(lunout,*) 'ij_begin =', ij_begin 199 WRITE(lunout,*) 'ij_end =', ij_end 200 WRITE(lunout,*) 'ij_nb =', ij_nb 201 WRITE(lunout,*) 'klon_mpi_begin =', klon_mpi_begin 202 WRITE(lunout,*) 'klon_mpi_end =', klon_mpi_end 203 WRITE(lunout,*) 'klon_mpi =', klon_mpi 204 WRITE(lunout,*) 'jj_para_nb =', jj_para_nb 205 WRITE(lunout,*) 'jj_para_begin =', jj_para_begin 206 WRITE(lunout,*) 'jj_para_end =', jj_para_end 207 WRITE(lunout,*) 'ii_para_begin =', ii_para_begin 208 WRITE(lunout,*) 'ii_para_end =', ii_para_end 209 WRITE(lunout,*) 'ij_para_nb =', ij_para_nb 210 WRITE(lunout,*) 'ij_para_begin =', ij_para_begin 211 WRITE(lunout,*) 'ij_para_end =', ij_para_end 212 WRITE(lunout,*) 'klon_mpi_para_nb =', klon_mpi_para_nb 213 WRITE(lunout,*) 'klon_mpi_para_begin 
=', klon_mpi_para_begin 214 WRITE(lunout,*) 'klon_mpi_para_end =', klon_mpi_para_end 215 WRITE(lunout,*) 'mpi_rank =', mpi_rank 216 WRITE(lunout,*) 'mpi_size =', mpi_size 217 WRITE(lunout,*) 'mpi_master =', mpi_master 218 WRITE(lunout,*) 'is_mpi_root =', is_mpi_root 219 WRITE(lunout,*) 'is_north_pole =', is_north_pole 220 WRITE(lunout,*) 'is_south_pole =', is_south_pole 221 WRITE(lunout,*) 'COMM_LMDZ_PHY =', COMM_LMDZ_PHY 215 222 216 223 END SUBROUTINE print_module_data -
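A small sketch (illustration only, not code from the changeset; the 4-process split of a 3010-column grid is a made-up example) of what the MPI_ALLGATHER above provides: once every process has contributed its own nbp, contiguous begin/end column indices follow from a running sum, which mirrors how klon_mpi_para_begin/end are then derived.

  program distrib_sketch
    implicit none
    integer, parameter :: nproc = 4
    integer :: distrib(0:nproc-1) = (/ 753, 753, 752, 752 /)   ! assumed per-process column counts
    integer :: kbegin(0:nproc-1), kend(0:nproc-1)
    integer :: i

    kbegin(0) = 1
    kend(0)   = distrib(0)
    do i = 1, nproc-1
      kbegin(i) = kend(i-1) + 1
      kend(i)   = kbegin(i) + distrib(i) - 1
    end do
    print *, 'process 2 owns global columns', kbegin(2), 'to', kend(2)   ! 1507 to 2258
  end program distrib_sketch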
trunk/LMDZ.GENERIC/libf/phy_common/mod_phys_lmdz_mpi_transfert.F90
r1521 r1543 9 9 bcast_mpi_i,bcast_mpi_i1,bcast_mpi_i2,bcast_mpi_i3,bcast_mpi_i4, & 10 10 bcast_mpi_r,bcast_mpi_r1,bcast_mpi_r2,bcast_mpi_r3,bcast_mpi_r4, & 11 11 bcast_mpi_l,bcast_mpi_l1,bcast_mpi_l2,bcast_mpi_l3,bcast_mpi_l4 12 12 END INTERFACE 13 13 … … 15 15 MODULE PROCEDURE scatter_mpi_i,scatter_mpi_i1,scatter_mpi_i2,scatter_mpi_i3, & 16 16 scatter_mpi_r,scatter_mpi_r1,scatter_mpi_r2,scatter_mpi_r3, & 17 17 scatter_mpi_l,scatter_mpi_l1,scatter_mpi_l2,scatter_mpi_l3 18 18 END INTERFACE 19 19 … … 22 22 MODULE PROCEDURE gather_mpi_i,gather_mpi_i1,gather_mpi_i2,gather_mpi_i3, & 23 23 gather_mpi_r,gather_mpi_r1,gather_mpi_r2,gather_mpi_r3, & 24 24 gather_mpi_l,gather_mpi_l1,gather_mpi_l2,gather_mpi_l3 25 25 END INTERFACE 26 26 … … 28 28 MODULE PROCEDURE scatter2D_mpi_i,scatter2D_mpi_i1,scatter2D_mpi_i2,scatter2D_mpi_i3, & 29 29 scatter2D_mpi_r,scatter2D_mpi_r1,scatter2D_mpi_r2,scatter2D_mpi_r3, & 30 30 scatter2D_mpi_l,scatter2D_mpi_l1,scatter2D_mpi_l2,scatter2D_mpi_l3 31 31 END INTERFACE 32 32 … … 34 34 MODULE PROCEDURE gather2D_mpi_i,gather2D_mpi_i1,gather2D_mpi_i2,gather2D_mpi_i3, & 35 35 gather2D_mpi_r,gather2D_mpi_r1,gather2D_mpi_r2,gather2D_mpi_r3, & 36 36 gather2D_mpi_l,gather2D_mpi_l1,gather2D_mpi_l2,gather2D_mpi_l3 37 37 END INTERFACE 38 38 … … 45 45 MODULE PROCEDURE grid1dTo2d_mpi_i,grid1dTo2d_mpi_i1,grid1dTo2d_mpi_i2,grid1dTo2d_mpi_i3, & 46 46 grid1dTo2d_mpi_r,grid1dTo2d_mpi_r1,grid1dTo2d_mpi_r2,grid1dTo2d_mpi_r3, & 47 47 grid1dTo2d_mpi_l,grid1dTo2d_mpi_l1,grid1dTo2d_mpi_l2,grid1dTo2d_mpi_l3 48 48 END INTERFACE 49 49 … … 51 51 MODULE PROCEDURE grid2dTo1d_mpi_i,grid2dTo1d_mpi_i1,grid2dTo1d_mpi_i2,grid2dTo1d_mpi_i3, & 52 52 grid2dTo1d_mpi_r,grid2dTo1d_mpi_r1,grid2dTo1d_mpi_r2,grid2dTo1d_mpi_r3, & 53 53 grid2dTo1d_mpi_l,grid2dTo1d_mpi_l1,grid2dTo1d_mpi_l2,grid2dTo1d_mpi_l3 54 54 END INTERFACE 55 55 … … 514 514 515 515 SUBROUTINE scatter2D_mpi_i(VarIn, VarOut) 516 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo516 USE mod_grid_phy_lmdz 517 517 IMPLICIT NONE 518 518 … … 528 528 529 529 SUBROUTINE scatter2D_mpi_i1(VarIn, VarOut) 530 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo530 USE mod_grid_phy_lmdz 531 531 IMPLICIT NONE 532 532 … … 541 541 542 542 SUBROUTINE scatter2D_mpi_i2(VarIn, VarOut) 543 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo543 USE mod_grid_phy_lmdz 544 544 IMPLICIT NONE 545 545 … … 555 555 556 556 SUBROUTINE scatter2D_mpi_i3(VarIn, VarOut) 557 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo557 USE mod_grid_phy_lmdz 558 558 IMPLICIT NONE 559 559 … … 570 570 571 571 SUBROUTINE scatter2D_mpi_r(VarIn, VarOut) 572 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo572 USE mod_grid_phy_lmdz 573 573 IMPLICIT NONE 574 574 … … 585 585 586 586 SUBROUTINE scatter2D_mpi_r1(VarIn, VarOut) 587 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo587 USE mod_grid_phy_lmdz 588 588 IMPLICIT NONE 589 589 REAL,INTENT(IN),DIMENSION(:,:,:) :: VarIn … … 599 599 600 600 SUBROUTINE scatter2D_mpi_r2(VarIn, VarOut) 601 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo601 USE mod_grid_phy_lmdz 602 602 IMPLICIT NONE 603 603 … … 613 613 614 614 SUBROUTINE scatter2D_mpi_r3(VarIn, VarOut) 615 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo615 USE mod_grid_phy_lmdz 616 616 IMPLICIT NONE 617 617 … … 628 628 629 629 SUBROUTINE scatter2D_mpi_l(VarIn, VarOut) 630 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo630 USE mod_grid_phy_lmdz 631 631 IMPLICIT NONE 632 632 … … 643 643 644 644 SUBROUTINE scatter2D_mpi_l1(VarIn, VarOut) 645 USE 
mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo645 USE mod_grid_phy_lmdz 646 646 IMPLICIT NONE 647 647 … … 658 658 659 659 SUBROUTINE scatter2D_mpi_l2(VarIn, VarOut) 660 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo660 USE mod_grid_phy_lmdz 661 661 IMPLICIT NONE 662 662 … … 672 672 673 673 SUBROUTINE scatter2D_mpi_l3(VarIn, VarOut) 674 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo674 USE mod_grid_phy_lmdz 675 675 IMPLICIT NONE 676 676 … … 691 691 692 692 SUBROUTINE gather2D_mpi_i(VarIn, VarOut) 693 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo693 USE mod_grid_phy_lmdz 694 694 IMPLICIT NONE 695 695 … … 705 705 706 706 SUBROUTINE gather2D_mpi_i1(VarIn, VarOut) 707 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo707 USE mod_grid_phy_lmdz 708 708 IMPLICIT NONE 709 709 … … 719 719 720 720 SUBROUTINE gather2D_mpi_i2(VarIn, VarOut) 721 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo721 USE mod_grid_phy_lmdz 722 722 IMPLICIT NONE 723 723 … … 733 733 734 734 SUBROUTINE gather2D_mpi_i3(VarIn, VarOut) 735 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo735 USE mod_grid_phy_lmdz 736 736 IMPLICIT NONE 737 737 … … 749 749 750 750 SUBROUTINE gather2D_mpi_r(VarIn, VarOut) 751 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo751 USE mod_grid_phy_lmdz 752 752 IMPLICIT NONE 753 753 … … 763 763 764 764 SUBROUTINE gather2D_mpi_r1(VarIn, VarOut) 765 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo765 USE mod_grid_phy_lmdz 766 766 IMPLICIT NONE 767 767 … … 777 777 778 778 SUBROUTINE gather2D_mpi_r2(VarIn, VarOut) 779 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo779 USE mod_grid_phy_lmdz 780 780 IMPLICIT NONE 781 781 … … 791 791 792 792 SUBROUTINE gather2D_mpi_r3(VarIn, VarOut) 793 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo793 USE mod_grid_phy_lmdz 794 794 IMPLICIT NONE 795 795 … … 807 807 808 808 SUBROUTINE gather2D_mpi_l(VarIn, VarOut) 809 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo809 USE mod_grid_phy_lmdz 810 810 IMPLICIT NONE 811 811 … … 821 821 822 822 SUBROUTINE gather2D_mpi_l1(VarIn, VarOut) 823 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo823 USE mod_grid_phy_lmdz 824 824 IMPLICIT NONE 825 825 … … 835 835 836 836 SUBROUTINE gather2D_mpi_l2(VarIn, VarOut) 837 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo837 USE mod_grid_phy_lmdz 838 838 IMPLICIT NONE 839 839 … … 849 849 850 850 SUBROUTINE gather2D_mpi_l3(VarIn, VarOut) 851 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo851 USE mod_grid_phy_lmdz 852 852 IMPLICIT NONE 853 853 … … 1236 1236 1237 1237 SUBROUTINE bcast_mpi_cgen(var,nb) 1238 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1238 USE mod_phys_lmdz_mpi_data 1239 1239 IMPLICIT NONE 1240 1240 … … 1250 1250 1251 1251 #ifdef CPP_MPI 1252 CALL MPI_BCAST(Var,nb,MPI_CHARACTER,mpi_ root_x,COMM_LMDZ_PHY,ierr)1252 CALL MPI_BCAST(Var,nb,MPI_CHARACTER,mpi_master,COMM_LMDZ_PHY,ierr) 1253 1253 #endif 1254 1254 … … 1258 1258 1259 1259 SUBROUTINE bcast_mpi_igen(var,nb) 1260 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1261 IMPLICIT NONE 1262 1260 USE mod_phys_lmdz_mpi_data 1261 IMPLICIT NONE 1262 1263 INTEGER,INTENT(IN) :: nb 1263 1264 INTEGER,DIMENSION(nb),INTENT(INOUT) :: Var 1264 INTEGER,INTENT(IN) :: nb1265 1265 1266 1266 #ifdef CPP_MPI … … 1272 1272 1273 1273 #ifdef CPP_MPI 1274 CALL MPI_BCAST(Var,nb,MPI_INTEGER,mpi_ root_x,COMM_LMDZ_PHY,ierr)1274 CALL MPI_BCAST(Var,nb,MPI_INTEGER,mpi_master,COMM_LMDZ_PHY,ierr) 1275 1275 #endif 1276 1276 … … 1281 1281 1282 1282 SUBROUTINE 
bcast_mpi_rgen(var,nb) 1283 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1284 IMPLICIT NONE 1285 1283 USE mod_phys_lmdz_mpi_data 1284 IMPLICIT NONE 1285 1286 INTEGER,INTENT(IN) :: nb 1286 1287 REAL,DIMENSION(nb),INTENT(INOUT) :: Var 1288 1289 #ifdef CPP_MPI 1290 INCLUDE 'mpif.h' 1291 #endif 1292 INTEGER :: ierr 1293 1294 IF (.not.is_using_mpi) RETURN 1295 1296 #ifdef CPP_MPI 1297 CALL MPI_BCAST(Var,nb,MPI_REAL_LMDZ,mpi_master,COMM_LMDZ_PHY,ierr) 1298 #endif 1299 1300 END SUBROUTINE bcast_mpi_rgen 1301 1302 1303 1304 1305 SUBROUTINE bcast_mpi_lgen(var,nb) 1306 USE mod_phys_lmdz_mpi_data 1307 IMPLICIT NONE 1308 1287 1309 INTEGER,INTENT(IN) :: nb 1310 LOGICAL,DIMENSION(nb),INTENT(INOUT) :: Var 1288 1311 1289 1312 #ifdef CPP_MPI … … 1295 1318 1296 1319 #ifdef CPP_MPI 1297 CALL MPI_BCAST(Var,nb,MPI_REAL_LMDZ,mpi_root_x,COMM_LMDZ_PHY,ierr) 1298 #endif 1299 1300 END SUBROUTINE bcast_mpi_rgen 1301 1302 1303 1304 1305 SUBROUTINE bcast_mpi_lgen(var,nb) 1306 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1307 IMPLICIT NONE 1308 1309 LOGICAL,DIMENSION(nb),INTENT(INOUT) :: Var 1310 INTEGER,INTENT(IN) :: nb 1311 1312 #ifdef CPP_MPI 1313 INCLUDE 'mpif.h' 1314 #endif 1315 INTEGER :: ierr 1316 1317 IF (.not.is_using_mpi) RETURN 1318 1319 #ifdef CPP_MPI 1320 CALL MPI_BCAST(Var,nb,MPI_LOGICAL,mpi_root_x,COMM_LMDZ_PHY,ierr) 1321 if (ierr.ne.MPI_SUCCESS) then 1322 write(*,*) "bcast_mpi error: ierr=",ierr 1323 stop 1324 endif 1320 CALL MPI_BCAST(Var,nb,MPI_LOGICAL,mpi_master,COMM_LMDZ_PHY,ierr) 1325 1321 #endif 1326 1322 … … 1330 1326 1331 1327 SUBROUTINE scatter_mpi_igen(VarIn, VarOut, dimsize) 1332 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1328 USE mod_phys_lmdz_mpi_data 1333 1329 USE mod_grid_phy_lmdz 1334 1330 IMPLICIT NONE … … 1369 1365 #ifdef CPP_MPI 1370 1366 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_INTEGER,VarOut,klon_mpi*dimsize, & 1371 MPI_INTEGER,mpi_ root_x, COMM_LMDZ_PHY,ierr)1367 MPI_INTEGER,mpi_master, COMM_LMDZ_PHY,ierr) 1372 1368 #endif 1373 1369 … … 1375 1371 1376 1372 SUBROUTINE scatter_mpi_rgen(VarIn, VarOut, dimsize) 1377 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1373 USE mod_phys_lmdz_mpi_data 1378 1374 USE mod_grid_phy_lmdz 1379 1375 IMPLICIT NONE … … 1413 1409 #ifdef CPP_MPI 1414 1410 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_REAL_LMDZ,VarOut,klon_mpi*dimsize, & 1415 MPI_REAL_LMDZ,mpi_ root_x, COMM_LMDZ_PHY,ierr)1411 MPI_REAL_LMDZ,mpi_master, COMM_LMDZ_PHY,ierr) 1416 1412 1417 1413 #endif … … 1421 1417 1422 1418 SUBROUTINE scatter_mpi_lgen(VarIn, VarOut, dimsize) 1423 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1419 USE mod_phys_lmdz_mpi_data 1424 1420 USE mod_grid_phy_lmdz 1425 1421 IMPLICIT NONE … … 1459 1455 #ifdef CPP_MPI 1460 1456 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_LOGICAL,VarOut,klon_mpi*dimsize, & 1461 MPI_LOGICAL,mpi_ root_x, COMM_LMDZ_PHY,ierr)1457 MPI_LOGICAL,mpi_master, COMM_LMDZ_PHY,ierr) 1462 1458 #endif 1463 1459 … … 1468 1464 1469 1465 SUBROUTINE gather_mpi_igen(VarIn, VarOut, dimsize) 1470 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1466 USE mod_phys_lmdz_mpi_data 1471 1467 USE mod_grid_phy_lmdz 1472 1468 IMPLICIT NONE … … 1497 1493 displs(rank)=Index-1 1498 1494 counts(rank)=nb*dimsize 1499 1495 Index=Index+nb*dimsize 1500 1496 ENDDO 1501 1497 … … 1504 1500 #ifdef CPP_MPI 1505 1501 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_INTEGER,VarTmp,counts,displs, & 1506 MPI_INTEGER,mpi_ root_x, COMM_LMDZ_PHY,ierr)1507 #endif 1508 1509 1502 MPI_INTEGER,mpi_master, COMM_LMDZ_PHY,ierr) 1503 #endif 1504 1505 1510 1506 IF (is_mpi_root) THEN 
1511 1507 Index=1 … … 1514 1510 DO i=1,dimsize 1515 1511 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1516 1512 Index=Index+nb 1517 1513 ENDDO 1518 1514 ENDDO … … 1522 1518 1523 1519 SUBROUTINE gather_mpi_rgen(VarIn, VarOut, dimsize) 1524 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1520 USE mod_phys_lmdz_mpi_data 1525 1521 USE mod_grid_phy_lmdz 1526 1522 IMPLICIT NONE … … 1546 1542 displs(rank)=Index-1 1547 1543 counts(rank)=nb*dimsize 1548 1544 Index=Index+nb*dimsize 1549 1545 ENDDO 1550 1546 ENDIF … … 1557 1553 #ifdef CPP_MPI 1558 1554 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_REAL_LMDZ,VarTmp,counts,displs, & 1559 MPI_REAL_LMDZ,mpi_ root_x, COMM_LMDZ_PHY,ierr)1560 #endif 1561 1555 MPI_REAL_LMDZ,mpi_master, COMM_LMDZ_PHY,ierr) 1556 #endif 1557 1562 1558 IF (is_mpi_root) THEN 1563 1559 Index=1 … … 1566 1562 DO i=1,dimsize 1567 1563 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1568 1564 Index=Index+nb 1569 1565 ENDDO 1570 1566 ENDDO … … 1574 1570 1575 1571 SUBROUTINE gather_mpi_lgen(VarIn, VarOut, dimsize) 1576 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1572 USE mod_phys_lmdz_mpi_data 1577 1573 USE mod_grid_phy_lmdz 1578 1574 IMPLICIT NONE … … 1603 1599 displs(rank)=Index-1 1604 1600 counts(rank)=nb*dimsize 1605 1601 Index=Index+nb*dimsize 1606 1602 ENDDO 1607 1603 ENDIF … … 1610 1606 #ifdef CPP_MPI 1611 1607 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_LOGICAL,VarTmp,counts,displs, & 1612 MPI_LOGICAL,mpi_ root_x, COMM_LMDZ_PHY,ierr)1613 #endif 1614 1608 MPI_LOGICAL,mpi_master, COMM_LMDZ_PHY,ierr) 1609 #endif 1610 1615 1611 IF (is_mpi_root) THEN 1616 1612 Index=1 … … 1619 1615 DO i=1,dimsize 1620 1616 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1621 1617 Index=Index+nb 1622 1618 ENDDO 1623 1619 ENDDO … … 1629 1625 1630 1626 SUBROUTINE reduce_sum_mpi_igen(VarIn,VarOut,nb) 1631 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1627 USE mod_phys_lmdz_mpi_data 1632 1628 USE mod_grid_phy_lmdz 1633 1629 IMPLICIT NONE … … 1637 1633 #endif 1638 1634 1635 INTEGER,INTENT(IN) :: nb 1639 1636 INTEGER,DIMENSION(nb),INTENT(IN) :: VarIn 1640 1637 INTEGER,DIMENSION(nb),INTENT(OUT) :: VarOut 1641 INTEGER,INTENT(IN) :: nb1642 1638 INTEGER :: ierr 1643 1639 … … 1649 1645 1650 1646 #ifdef CPP_MPI 1651 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_INTEGER,MPI_SUM,mpi_ root_x,COMM_LMDZ_PHY,ierr)1647 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_INTEGER,MPI_SUM,mpi_master,COMM_LMDZ_PHY,ierr) 1652 1648 #endif 1653 1649 … … 1655 1651 1656 1652 SUBROUTINE reduce_sum_mpi_rgen(VarIn,VarOut,nb) 1657 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1653 USE mod_phys_lmdz_mpi_data 1658 1654 USE mod_grid_phy_lmdz 1659 1655 … … 1664 1660 #endif 1665 1661 1662 INTEGER,INTENT(IN) :: nb 1666 1663 REAL,DIMENSION(nb),INTENT(IN) :: VarIn 1667 1664 REAL,DIMENSION(nb),INTENT(OUT) :: VarOut 1668 INTEGER,INTENT(IN) :: nb1669 1665 INTEGER :: ierr 1670 1666 … … 1675 1671 1676 1672 #ifdef CPP_MPI 1677 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_REAL_LMDZ,MPI_SUM,mpi_ root_x,COMM_LMDZ_PHY,ierr)1673 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_REAL_LMDZ,MPI_SUM,mpi_master,COMM_LMDZ_PHY,ierr) 1678 1674 #endif 1679 1675 … … 1711 1707 DO ij=1,nbp_lon 1712 1708 VarOut(ij,i)=VarIn(1,i) 1713 1709 ENDDO 1714 1710 ENDDO 1715 1711 ENDIF … … 1719 1715 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1720 1716 VarOut(ij,i)=VarIn(klon_mpi,i) 1721 1717 ENDDO 1722 1718 ENDDO 1723 1719 ENDIF … … 1755 1751 DO ij=1,nbp_lon 1756 1752 VarOut(ij,i)=VarIn(1,i) 1757 1753 
ENDDO 1758 1754 ENDDO 1759 1755 ENDIF … … 1763 1759 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1764 1760 VarOut(ij,i)=VarIn(klon_mpi,i) 1765 1761 ENDDO 1766 1762 ENDDO 1767 1763 ENDIF … … 1800 1796 DO ij=1,nbp_lon 1801 1797 VarOut(ij,i)=VarIn(1,i) 1802 1798 ENDDO 1803 1799 ENDDO 1804 1800 ENDIF … … 1808 1804 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1809 1805 VarOut(ij,i)=VarIn(klon_mpi,i) 1810 1806 ENDDO 1811 1807 ENDDO 1812 1808 ENDIF … … 1905 1901 1906 1902 END MODULE mod_phys_lmdz_mpi_transfert 1903 -
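For context, a hedged usage sketch of the broadcast interface whose root argument is renamed throughout this file from mpi_root to mpi_master (the subroutine below is invented for illustration and assumes the generic bcast_mpi accepts a scalar real, as its bcast_mpi_r entry suggests):

  subroutine broadcast_example(value)
    use mod_phys_lmdz_mpi_data, only: is_mpi_root
    use mod_phys_lmdz_mpi_transfert, only: bcast_mpi
    implicit none
    real, intent(inout) :: value
    if (is_mpi_root) value = 42.0   ! only the master holds the value initially
    call bcast_mpi(value)           ! afterwards every MPI process has it
  end subroutine broadcast_example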
trunk/LMDZ.GENERIC/libf/phy_common/mod_phys_lmdz_omp_data.F90
r1521 r1543 1 1 ! 2 !$Id: mod_phys_lmdz_omp_data.F90 1575 2011-09-21 13:57:48Z jghattas$2 !$Id: mod_phys_lmdz_omp_data.F90 2326 2015-07-10 12:24:29Z emillour $ 3 3 ! 4 4 MODULE mod_phys_lmdz_omp_data … … 91 91 SUBROUTINE Print_module_data 92 92 IMPLICIT NONE 93 !INCLUDE "iniprint.h"93 INCLUDE "iniprint.h" 94 94 95 95 !$OMP CRITICAL 96 WRITE( *,*)'--------> TASK ',omp_rank97 WRITE( *,*)'omp_size =',omp_size98 WRITE( *,*)'omp_rank =',omp_rank99 WRITE( *,*)'is_omp_root =',is_omp_root100 WRITE( *,*)'klon_omp_para_nb =',klon_omp_para_nb101 WRITE( *,*)'klon_omp_para_begin =',klon_omp_para_begin102 WRITE( *,*)'klon_omp_para_end =',klon_omp_para_end103 WRITE( *,*)'klon_omp =',klon_omp104 WRITE( *,*)'klon_omp_begin =',klon_omp_begin105 WRITE( *,*)'klon_omp_end =',klon_omp_end96 WRITE(lunout,*)'--------> TASK ',omp_rank 97 WRITE(lunout,*)'omp_size =',omp_size 98 WRITE(lunout,*)'omp_rank =',omp_rank 99 WRITE(lunout,*)'is_omp_root =',is_omp_root 100 WRITE(lunout,*)'klon_omp_para_nb =',klon_omp_para_nb 101 WRITE(lunout,*)'klon_omp_para_begin =',klon_omp_para_begin 102 WRITE(lunout,*)'klon_omp_para_end =',klon_omp_para_end 103 WRITE(lunout,*)'klon_omp =',klon_omp 104 WRITE(lunout,*)'klon_omp_begin =',klon_omp_begin 105 WRITE(lunout,*)'klon_omp_end =',klon_omp_end 106 106 !$OMP END CRITICAL 107 107 -
trunk/LMDZ.GENERIC/libf/phy_common/mod_phys_lmdz_omp_transfert.F90
r1521 r1543 25 25 bcast_omp_i,bcast_omp_i1,bcast_omp_i2,bcast_omp_i3,bcast_omp_i4, & 26 26 bcast_omp_r,bcast_omp_r1,bcast_omp_r2,bcast_omp_r3,bcast_omp_r4, & 27 27 bcast_omp_l,bcast_omp_l1,bcast_omp_l2,bcast_omp_l3,bcast_omp_l4 28 28 END INTERFACE 29 29 … … 31 31 MODULE PROCEDURE scatter_omp_i,scatter_omp_i1,scatter_omp_i2,scatter_omp_i3, & 32 32 scatter_omp_r,scatter_omp_r1,scatter_omp_r2,scatter_omp_r3, & 33 33 scatter_omp_l,scatter_omp_l1,scatter_omp_l2,scatter_omp_l3 34 34 END INTERFACE 35 35 … … 38 38 MODULE PROCEDURE gather_omp_i,gather_omp_i1,gather_omp_i2,gather_omp_i3, & 39 39 gather_omp_r,gather_omp_r1,gather_omp_r2,gather_omp_r3, & 40 40 gather_omp_l,gather_omp_l1,gather_omp_l2,gather_omp_l3 41 41 END INTERFACE 42 42 … … 48 48 49 49 50 PUBLIC bcast_omp,scatter_omp,gather_omp,reduce_sum_omp 50 PUBLIC bcast_omp,scatter_omp,gather_omp,reduce_sum_omp, omp_barrier 51 51 52 52 CONTAINS 53 53 54 SUBROUTINE omp_barrier 55 IMPLICIT NONE 56 57 !$OMP BARRIER 58 59 END SUBROUTINE omp_barrier 60 54 61 SUBROUTINE check_buffer_i(buff_size) 55 62 IMPLICIT NONE … … 733 740 IMPLICIT NONE 734 741 742 INTEGER,INTENT(IN) :: Nb 735 743 INTEGER,DIMENSION(Nb),INTENT(INOUT) :: Var 736 744 INTEGER,DIMENSION(Nb),INTENT(INOUT) :: Buff 737 INTEGER,INTENT(IN) :: Nb738 745 739 746 INTEGER :: i … … 757 764 IMPLICIT NONE 758 765 766 INTEGER,INTENT(IN) :: Nb 759 767 REAL,DIMENSION(Nb),INTENT(INOUT) :: Var 760 768 REAL,DIMENSION(Nb),INTENT(INOUT) :: Buff 761 INTEGER,INTENT(IN) :: Nb762 769 763 770 INTEGER :: i … … 780 787 IMPLICIT NONE 781 788 789 INTEGER,INTENT(IN) :: Nb 782 790 LOGICAL,DIMENSION(Nb),INTENT(INOUT) :: Var 783 791 LOGICAL,DIMENSION(Nb),INTENT(INOUT) :: Buff 784 INTEGER,INTENT(IN) :: Nb785 792 786 793 INTEGER :: i … … 802 809 803 810 SUBROUTINE scatter_omp_igen(VarIn,VarOut,dimsize,Buff) 804 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin811 USE mod_phys_lmdz_omp_data 805 812 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 806 813 IMPLICIT NONE … … 833 840 834 841 SUBROUTINE scatter_omp_rgen(VarIn,VarOut,dimsize,Buff) 835 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin842 USE mod_phys_lmdz_omp_data 836 843 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 837 844 IMPLICIT NONE … … 864 871 865 872 SUBROUTINE scatter_omp_lgen(VarIn,VarOut,dimsize,Buff) 866 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin873 USE mod_phys_lmdz_omp_data 867 874 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 868 875 IMPLICIT NONE … … 898 905 899 906 SUBROUTINE gather_omp_igen(VarIn,VarOut,dimsize,Buff) 900 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin907 USE mod_phys_lmdz_omp_data 901 908 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 902 909 IMPLICIT NONE … … 930 937 931 938 SUBROUTINE gather_omp_rgen(VarIn,VarOut,dimsize,Buff) 932 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin939 USE mod_phys_lmdz_omp_data 933 940 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 934 941 IMPLICIT NONE … … 962 969 963 970 SUBROUTINE gather_omp_lgen(VarIn,VarOut,dimsize,Buff) 964 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin971 USE mod_phys_lmdz_omp_data 965 972 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 966 973 IMPLICIT NONE -
trunk/LMDZ.GENERIC/libf/phy_common/mod_phys_lmdz_para.F90
r1521 r1543 13 13 14 14 !$OMP THREADPRIVATE(klon_loc,is_master) 15 !$OMP THREADPRIVATE(is_sequential,is_parallel)16 15 17 16 CONTAINS 18 17 19 SUBROUTINE Init_phys_lmdz_para( iim,jjp1,nb_proc,distrib)18 SUBROUTINE Init_phys_lmdz_para(nbp,nbp_lon,nbp_lat,communicator) 20 19 IMPLICIT NONE 21 INTEGER,INTENT(in) :: iim22 INTEGER,INTENT(in) :: jjp123 INTEGER,INTENT(in) :: nb _proc24 INTEGER,INTENT(in) :: distrib(0:nb_proc-1)20 INTEGER,INTENT(in) :: nbp 21 INTEGER,INTENT(in) :: nbp_lon 22 INTEGER,INTENT(in) :: nbp_lat 23 INTEGER,INTENT(in) :: communicator 25 24 26 CALL Init_phys_lmdz_mpi_data( iim,jjp1,nb_proc,distrib)25 CALL Init_phys_lmdz_mpi_data(nbp,nbp_lon,nbp_lat,communicator) 27 26 !$OMP PARALLEL 28 27 CALL Init_phys_lmdz_omp_data(klon_mpi) … … 46 45 47 46 SUBROUTINE Test_transfert 48 USE mod_grid_phy_lmdz, only: klon_glo, nbp_lev, nbp_lon, nbp_lat, & 49 grid1dTo2d_glo, grid2dTo1d_glo 47 USE mod_grid_phy_lmdz 50 48 IMPLICIT NONE 51 !INCLUDE "iniprint.h"49 INCLUDE "iniprint.h" 52 50 53 51 REAL :: Test_Field1d_glo(klon_glo,nbp_lev) … … 83 81 !$OMP MASTER 84 82 Checksum=sum(Test_Field1d_glo-tmp1d_glo) 85 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"83 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 86 84 !$OMP END MASTER 87 85 ENDIF … … 95 93 !$OMP MASTER 96 94 Checksum=sum(Test_Field1d_glo-tmp1d_glo) 97 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"95 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 98 96 !$OMP END MASTER 99 97 ENDIF … … 105 103 !$OMP MASTER 106 104 Checksum=sum(Test_Field1d_glo*omp_size*mpi_size-tmp1d_glo) 107 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"105 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 108 106 !$OMP END MASTER 109 107 ENDIF -
trunk/LMDZ.GENERIC/libf/phystd/condense_co2.F90
r1542 r1543 11 11 use aerosol_mod, only : iaero_co2 12 12 USE surfdat_h, only: emisice, emissiv 13 USE comgeomphy, only: latitude ! in radians 13 USE geometry_mod, only: latitude ! in radians 14 14 USE tracer_h, only: noms, rho_co2 15 15 use comcstfi_mod, only: g, r, cpp -
trunk/LMDZ.GENERIC/libf/phystd/dyn1d/rcm1d.F
r1542 r1543 3 3 ! to use 'getin' 4 4 use ioipsl_getincom, only: getin 5 use dimphy, only : init_dimphy 6 use mod_grid_phy_lmdz, only : regular_lonlat 5 7 use infotrac, only: nqtot, tname 6 8 use surfdat_h, only: albedodat, phisfi, dryness, watercaptag, … … 9 11 & dtemisice 10 12 use comdiurn_h, only: sinlat, coslat, sinlon, coslon 11 ! use comsaison_h12 13 use comsoil_h, only: nsoilmx, layer, mlayer, inertiedat, volcapa 13 14 use phyredem, only: physdem0,physdem1 14 use comgeomphy, only: initcomgeomphy, cell_area15 use geometry_mod, only: init_geometry 15 16 use slab_ice_h, only: noceanmx 16 17 use planete_mod, only: apoastr,periastr,year_day,peri_day, … … 27 28 use regular_lonlat_mod, only: init_regular_lonlat 28 29 use planete_mod, only: ini_planete_mod 30 use physics_distribution_mod, only: init_physics_distribution 31 use regular_lonlat_mod, only: init_regular_lonlat 32 use mod_interface_dyn_phys, only: init_interface_dyn_phys 29 33 use inifis_mod, only: inifis 30 34 implicit none … … 133 137 character*20,allocatable :: nametrac(:) ! name of the tracer (no need for adv trac common) 134 138 135 real :: latitude(1), longitude(1) 139 real :: latitude(1), longitude(1), cell_area(1) 136 140 137 141 c======================================================================= … … 140 144 ! initialize "serial/parallel" related stuff 141 145 ! CALL init_phys_lmdz(iim,jjm+1,llm,1,(/(jjm-1)*iim+2/)) 142 CALL init_phys_lmdz(1,1,llm,1,(/1/))143 call initcomgeomphy146 ! CALL init_phys_lmdz(1,1,llm,1,(/1/)) 147 ! call initcomgeomphy 144 148 145 149 !! those are defined in surfdat_h.F90 … … 490 494 491 495 ! initializations, as with iniphysiq.F90 for the 3D GCM 496 call init_physics_distribution(regular_lonlat,4, 497 & 1,1,1,nlayer,1) 498 call init_interface_dyn_phys 492 499 CALL init_regular_lonlat(1,1,longitude,latitude, 493 500 & (/0.,0./),(/0.,0./)) 494 501 call init_geometry(1,longitude,latitude, 502 & (/0.,0.,0.,0./),(/0.,0.,0.,0./), 503 & cell_area) 504 call init_dimphy(1,nlayer) ! Initialize dimphy module 495 505 call ini_planete_mod(nlayer,preff,ap,bp) 496 506 -
trunk/LMDZ.GENERIC/libf/phystd/hydrol.F90
r1542 r1543 10 10 USE surfdat_h 11 11 use comdiurn_h 12 USE comgeomphy, only: cell_area 12 USE geometry_mod, only: cell_area 13 13 USE tracer_h 14 14 use slab_ice_h -
trunk/LMDZ.GENERIC/libf/phystd/phyredem.F90
r1542 r1543 10 10 ! create physics restart file and write time-independent variables 11 11 use comsoil_h, only: volcapa, mlayer 12 use comgeomphy, only: cell_area12 use geometry_mod, only: cell_area 13 13 use surfdat_h, only: zmea, zstd, zsig, zgam, zthe, & 14 14 emisice, emissiv, & -
trunk/LMDZ.GENERIC/libf/phystd/physiq.F90
r1542 r1543 19 19 use comsaison_h, only: mu0, fract, dist_star, declin, right_ascen 20 20 use comsoil_h, only: nsoilmx, layer, mlayer, inertiedat 21 use comgeomphy, only: latitude, longitude, cell_area21 use geometry_mod, only: latitude, longitude, cell_area 22 22 USE comgeomfi_h, only: totarea, totarea_planet 23 23 USE tracer_h, only: noms, mmol, radius, rho_q, qext, & -
trunk/LMDZ.GENERIC/libf/phystd/rings.F90
r1542 r1543 5 5 6 6 use comdiurn_h, only: sinlat, sinlon, coslat, coslon 7 use comgeomphy, only: latitude ! (rad)7 use geometry_mod, only: latitude ! (rad) 8 8 9 9 implicit none -
trunk/LMDZ.GENERIC/libf/phystd/soil.F
r1542 r1543 8 8 use time_phylmdz_mod, only: daysec 9 9 use planete_mod, only: year_day 10 use comgeomphy, only: longitude, latitude ! in radians10 use geometry_mod, only: longitude, latitude ! in radians 11 11 12 12 implicit none -
trunk/LMDZ.GENERIC/libf/phystd/surface_nature.F
r1542 r1543 4 4 USE surfdat_h 5 5 USE comsoil_h 6 USE comgeomphy, ONLY: cell_area6 USE geometry_mod, ONLY: cell_area 7 7 USE tracer_h 8 8 -
trunk/LMDZ.GENERIC/libf/phystd/writediagfi.F
r1542 r1543 40 40 !================================================================= 41 41 use surfdat_h, only: phisfi 42 use comgeomphy, only: cell_area42 use geometry_mod, only: cell_area 43 43 use time_phylmdz_mod, only: ecritphy, day_step, iphysiq, day_ini 44 44 USE mod_phys_lmdz_para, only : is_parallel, is_mpi_root, -
trunk/LMDZ.GENERIC/libf/phystd/writediagsoil.F90
r1542 r1543 13 13 14 14 use comsoil_h, only: nsoilmx, inertiedat 15 use comgeomphy, only: cell_area15 use geometry_mod, only: cell_area 16 16 use time_phylmdz_mod, only: ecritphy, day_step, iphysiq 17 17 use mod_phys_lmdz_para, only : is_mpi_root, is_master, gather -
trunk/LMDZ.GENERIC/libf/phystd/writediagspecIR.F
r1542 r1543 43 43 ! Addition by RW (2010) to allow OLR to be saved in .nc format 44 44 use radinc_h, only : L_NSPECTI 45 use comgeomphy, only: cell_area45 use geometry_mod, only: cell_area 46 46 use mod_phys_lmdz_para, only : is_mpi_root, is_master, gather 47 47 use mod_grid_phy_lmdz, only : klon_glo, Grid1Dto2D_glo, -
trunk/LMDZ.GENERIC/libf/phystd/writediagspecVI.F
r1542 r1543 43 43 ! Addition by RW (2010) to allow OSR to be saved in .nc format 44 44 use radinc_h, only : L_NSPECTV 45 use comgeomphy, only: cell_area45 use geometry_mod, only: cell_area 46 46 use mod_phys_lmdz_para, only : is_mpi_root, is_master, gather 47 47 use mod_grid_phy_lmdz, only : klon_glo, Grid1Dto2D_glo, -
trunk/LMDZ.MARS/README
r1541 r1543 2270 2270 comgeomphy.F90 instead 2271 2271 2272 == 22/04/2016 == EM 2273 - Updates and cleanup wrt dynamics/physics separation: 2274 Removed init_phys_lmdz.F90 and comgeomphy.F90 from phymars; 2275 comgeomphy is replaced by geometry_mod (located in phy_common). 2276 Added physics_distribution_mod.F90 in phy_common and 2277 mod_interface_dyn_phys.F90 in dynphy_lonlat. 2278 Added nrtype.F90 (contains math const. like PI, etc.) in "misc" -
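In the physics routines this comgeomphy-to-geometry_mod switch amounts to the one-line module change repeated throughout the changeset, the variable names latitude, longitude and cell_area being left untouched; a minimal sketch (the routine itself is invented for illustration):

  subroutine where_am_i(ig)
    use geometry_mod, only: latitude, longitude, cell_area   ! was: use comgeomphy, only: ...
    implicit none
    integer, intent(in) :: ig
    write(*,*) 'column', ig, ': lon=', longitude(ig), &
               ' lat=', latitude(ig), ' area=', cell_area(ig)
  end subroutine where_am_i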
trunk/LMDZ.MARS/libf/aeronomars/moldiff_red.F90
r1541 r1543 3 3 4 4 use tracer_mod, only: noms, mmol 5 use comgeomphy, only: cell_area5 use geometry_mod, only: cell_area 6 6 7 7 implicit none -
trunk/LMDZ.MARS/libf/dyn3d/gcm.F
r1523 r1543 7 7 use filtreg_mod, only: inifilr 8 8 ! use comgeomphy, only: initcomgeomphy 9 USE mod_const_mpi, ONLY: COMM_LMDZ 9 10 USE comvert_mod, ONLY: ap,bp 10 11 USE comconst_mod, ONLY: daysec,dtvr,dtphys,dtdiss,rad,g,r,cpp … … 160 161 c----------------------------------------------------------------------- 161 162 CALL defrun_new( 99, .TRUE. ) 162 163 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!164 ! FH 2008/05/02165 ! A nettoyer. On ne veut qu'une ou deux routines d'interface166 ! dynamique -> physique pour l'initialisation167 !#ifdef CPP_PHYS168 CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/))169 ! call initcomgeomphy ! now done in iniphysiq170 !#endif171 !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!172 163 173 164 ! Initialize tracers … … 254 245 !#ifdef CPP_PHYS 255 246 ! CALL iniphysiq(iim,jjm,llm,daysec,day_ini,dtphys/nsplit_phys, 256 CALL iniphysiq(iim,jjm,llm,daysec,day_ini,dtphys, 247 CALL iniphysiq(iim,jjm,llm, 248 & (jjm-1)*iim+2,comm_lmdz, 249 & daysec,day_ini,dtphys, 257 250 & rlatu,rlatv,rlonu,rlonv, 258 251 & aire,cu,cv,rad,g,r,cpp, -
trunk/LMDZ.MARS/libf/dynphy_lonlat/phymars/iniphysiq_mod.F90
r1541 r1543 3 3 CONTAINS 4 4 5 subroutine iniphysiq(ii,jj,nlayer,punjours, pdayref,ptimestep, & 6 rlatu,rlatv,rlonu,rlonv,aire,cu,cv, & 5 subroutine iniphysiq(ii,jj,nlayer, & 6 nbp, communicator, & 7 punjours, pdayref,ptimestep, & 8 rlatu,rlatv,rlonu,rlonv,aire,cu,cv, & 7 9 prad,pg,pr,pcpp,iflag_phys) 8 10 9 use dimphy, only : klev ! number of atmospheric levels10 use mod_grid_phy_lmdz, only : klon_glo ! number of atmospheric columns11 ! (on full grid)11 use dimphy, only : init_dimphy 12 use mod_grid_phy_lmdz, only : klon_glo, & ! number of atmospheric columns (on full grid) 13 regular_lonlat ! regular longitude-latitude grid type 12 14 use mod_phys_lmdz_para, only : klon_omp, & ! number of columns (on local omp grid) 13 15 klon_omp_begin, & ! start index of local omp subgrid 14 16 klon_omp_end, & ! end index of local omp subgrid 15 17 klon_mpi_begin ! start indes of columns (on local mpi grid) 16 17 use comgeomphy, only : initcomgeomphy, &18 cell_area, & ! physics grid area (m2)19 dx, & ! cu coeff. (u_covariant = cu * u)20 dy, & ! cv coeff. (v_covariant = cv * v)21 longitude, & ! longitudes (rad)22 latitude ! latitudes (rad)18 use geometry_mod, only: init_geometry 19 !use comgeomphy, only : initcomgeomphy, & 20 ! cell_area, & ! physics grid area (m2) 21 ! dx, & ! cu coeff. (u_covariant = cu * u) 22 ! dy, & ! cv coeff. (v_covariant = cv * v) 23 ! longitude, & ! longitudes (rad) 24 ! latitude ! latitudes (rad) 23 25 use infotrac, only : nqtot ! number of advected tracers 24 26 use comgeomfi_h, only: ini_fillgeom 25 27 use temps_mod, only: day_ini, hour_ini 26 28 use phys_state_var_init_mod, only: phys_state_var_init 29 use physics_distribution_mod, only: init_physics_distribution 27 30 use regular_lonlat_mod, only: init_regular_lonlat, & 28 31 east, west, north, south, & 29 32 north_east, north_west, & 30 33 south_west, south_east 34 use mod_interface_dyn_phys, only: init_interface_dyn_phys 31 35 32 36 implicit none … … 43 47 integer,intent(in) :: ii ! number of atmospheric coulumns along longitudes 44 48 integer,intent(in) :: jj ! number of atompsheric columns along latitudes 49 integer,intent(in) :: nbp ! number of physics columns for this MPI process 50 integer,intent(in) :: communicator ! MPI communicator 45 51 real,intent(in) :: rlatu(jj+1) ! latitudes of the physics grid 46 52 real,intent(in) :: rlatv(jj) ! latitude boundaries of the physics grid … … 55 61 56 62 integer :: ibegin,iend,offset 57 integer :: i,j 63 integer :: i,j,k 58 64 character(len=20) :: modname='iniphysiq' 59 65 character(len=80) :: abort_message … … 66 72 67 73 ! global array, on full physics grid: 68 real,allocatable :: latfi(:) 69 real,allocatable :: lonfi(:) 70 real,allocatable :: cufi(:) 71 real,allocatable :: cvfi(:) 72 real,allocatable :: airefi(:) 74 real,allocatable :: latfi_glo(:) 75 real,allocatable :: lonfi_glo(:) 76 real,allocatable :: cufi_glo(:) 77 real,allocatable :: cvfi_glo(:) 78 real,allocatable :: airefi_glo(:) 79 real,allocatable :: boundslonfi_glo(:,:) 80 real,allocatable :: boundslatfi_glo(:,:) 81 82 ! 
local arrays, on given MPI/OpenMP domain: 83 real,allocatable,save :: latfi(:) 84 real,allocatable,save :: lonfi(:) 85 real,allocatable,save :: cufi(:) 86 real,allocatable,save :: cvfi(:) 87 real,allocatable,save :: airefi(:) 88 real,allocatable,save :: boundslonfi(:,:) 89 real,allocatable,save :: boundslatfi(:,:) 90 !$OMP THREADPRIVATE (latfi,lonfi,cufi,cvfi,airefi,boundslonfi,boundslatfi) 91 73 92 74 93 pi=2.*asin(1.0) 75 94 76 IF (nlayer.NE.klev) THEN 77 write(*,*) 'STOP in ',trim(modname) 78 write(*,*) 'Problem with dimensions :' 79 write(*,*) 'nlayer = ',nlayer 80 write(*,*) 'klev = ',klev 81 abort_message = '' 82 CALL abort_gcm (modname,abort_message,1) 83 ENDIF 84 85 !IF (ngrid.NE.klon_glo) THEN 86 ! write(*,*) 'STOP in ',trim(modname) 87 ! write(*,*) 'Problem with dimensions :' 88 ! write(*,*) 'ngrid = ',ngrid 89 ! write(*,*) 'klon = ',klon_glo 90 ! abort_message = '' 91 ! CALL abort_gcm (modname,abort_message,1) 92 !ENDIF 95 ! Initialize Physics distibution and parameters and interface with dynamics 96 CALL init_physics_distribution(regular_lonlat,4, & 97 nbp,ii,jj+1,nlayer,communicator) 98 CALL init_interface_dyn_phys 93 99 94 100 ! init regular global longitude-latitude grid points and boundaries … … 115 121 116 122 ! Generate global arrays on full physics grid 117 allocate(latfi(klon_glo),lonfi(klon_glo),cufi(klon_glo),cvfi(klon_glo)) 118 latfi(1)=rlatu(1) 119 lonfi(1)=0. 120 cufi(1) = cu(1) 121 cvfi(1) = cv(1) 123 allocate(latfi_glo(klon_glo),lonfi_glo(klon_glo)) 124 allocate(cufi_glo(klon_glo),cvfi_glo(klon_glo)) 125 allocate(airefi_glo(klon_glo)) 126 allocate(boundslonfi_glo(klon_glo,4)) 127 allocate(boundslatfi_glo(klon_glo,4)) 128 129 ! North pole 130 latfi_glo(1)=rlatu(1) 131 lonfi_glo(1)=0. 132 cufi_glo(1) = cu(1) 133 cvfi_glo(1) = cv(1) 134 boundslonfi_glo(1,north_east)=0 135 boundslatfi_glo(1,north_east)=PI/2 136 boundslonfi_glo(1,north_west)=2*PI 137 boundslatfi_glo(1,north_west)=PI/2 138 boundslonfi_glo(1,south_west)=2*PI 139 boundslatfi_glo(1,south_west)=rlatv(1) 140 boundslonfi_glo(1,south_east)=0 141 boundslatfi_glo(1,south_east)=rlatv(1) 122 142 DO j=2,jj 123 143 DO i=1,ii 124 latfi((j-2)*ii+1+i)= rlatu(j) 125 lonfi((j-2)*ii+1+i)= rlonv(i) 126 cufi((j-2)*ii+1+i) = cu((j-1)*(ii+1)+i) 127 cvfi((j-2)*ii+1+i) = cv((j-1)*(ii+1)+i) 144 k=(j-2)*ii+1+i 145 latfi_glo((j-2)*ii+1+i)= rlatu(j) 146 lonfi_glo((j-2)*ii+1+i)= rlonv(i) 147 cufi_glo((j-2)*ii+1+i) = cu((j-1)*(ii+1)+i) 148 cvfi_glo((j-2)*ii+1+i) = cv((j-1)*(ii+1)+i) 149 boundslonfi_glo(k,north_east)=rlonu(i) 150 boundslatfi_glo(k,north_east)=rlatv(j-1) 151 boundslonfi_glo(k,north_west)=rlonu(i+1) 152 boundslatfi_glo(k,north_west)=rlatv(j-1) 153 boundslonfi_glo(k,south_west)=rlonu(i+1) 154 boundslatfi_glo(k,south_west)=rlatv(j) 155 boundslonfi_glo(k,south_east)=rlonu(i) 156 boundslatfi_glo(k,south_east)=rlatv(j) 128 157 ENDDO 129 158 ENDDO 130 latfi(klon_glo)= rlatu(jj+1) 131 lonfi(klon_glo)= 0. 132 cufi(klon_glo) = cu((ii+1)*jj+1) 133 cvfi(klon_glo) = cv((ii+1)*jj-ii) 159 ! South pole 160 latfi_glo(klon_glo)= rlatu(jj+1) 161 lonfi_glo(klon_glo)= 0. 
162 cufi_glo(klon_glo) = cu((ii+1)*jj+1) 163 cvfi_glo(klon_glo) = cv((ii+1)*jj-ii) 164 boundslonfi_glo(klon_glo,north_east)= 0 165 boundslatfi_glo(klon_glo,north_east)= rlatv(jj) 166 boundslonfi_glo(klon_glo,north_west)= 2*PI 167 boundslatfi_glo(klon_glo,north_west)= rlatv(jj) 168 boundslonfi_glo(klon_glo,south_west)= 2*PI 169 boundslatfi_glo(klon_glo,south_west)= -PI/2 170 boundslonfi_glo(klon_glo,south_east)= 0 171 boundslatfi_glo(klon_glo,south_east)= -Pi/2 134 172 135 173 ! build airefi(), mesh area on physics grid 136 allocate(airefi(klon_glo)) 137 CALL gr_dyn_fi(1,ii+1,jj+1,klon_glo,aire,airefi) 174 CALL gr_dyn_fi(1,ii+1,jj+1,klon_glo,aire,airefi_glo) 138 175 ! Poles are single points on physics grid 139 airefi (1)=sum(aire(1:ii,1))140 airefi (klon_glo)=sum(aire(1:ii,jj+1))176 airefi_glo(1)=sum(aire(1:ii,1)) 177 airefi_glo(klon_glo)=sum(aire(1:ii,jj+1)) 141 178 142 179 ! Sanity check: do total planet area match between physics and dynamics? 143 180 total_area_dyn=sum(aire(1:ii,1:jj+1)) 144 total_area_phy=sum(airefi (1:klon_glo))181 total_area_phy=sum(airefi_glo(1:klon_glo)) 145 182 IF (total_area_dyn/=total_area_phy) THEN 146 183 WRITE (lunout, *) 'iniphysiq: planet total surface discrepancy !!!' … … 158 195 !$OMP PARALLEL 159 196 ! Now generate local lon/lat/cu/cv/area arrays 160 call initcomgeomphy 161 162 !!!!$OMP PARALLEL PRIVATE(ibegin,iend) 163 !!!$OMP+ SHARED(parea,pcu,pcv,plon,plat) 197 allocate(latfi(klon_omp),lonfi(klon_omp),cufi(klon_omp),cvfi(klon_omp)) 198 allocate(airefi(klon_omp)) 199 allocate(boundslonfi(klon_omp,4)) 200 allocate(boundslatfi(klon_omp,4)) 201 !call initcomgeomphy 164 202 165 203 offset=klon_mpi_begin-1 166 cell_area(1:klon_omp)=airefi(offset+klon_omp_begin:offset+klon_omp_end) 167 dx(1:klon_omp)=cufi(offset+klon_omp_begin:offset+klon_omp_end) 168 dy(1:klon_omp)=cvfi(offset+klon_omp_begin:offset+klon_omp_end) 169 longitude(1:klon_omp)=lonfi(offset+klon_omp_begin:offset+klon_omp_end) 170 latitude(1:klon_omp)=latfi(offset+klon_omp_begin:offset+klon_omp_end) 204 airefi(1:klon_omp)=airefi_glo(offset+klon_omp_begin:offset+klon_omp_end) 205 cufi(1:klon_omp)=cufi_glo(offset+klon_omp_begin:offset+klon_omp_end) 206 cvfi(1:klon_omp)=cvfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 207 lonfi(1:klon_omp)=lonfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 208 latfi(1:klon_omp)=latfi_glo(offset+klon_omp_begin:offset+klon_omp_end) 209 boundslonfi(1:klon_omp,:)=boundslonfi_glo(offset+klon_omp_begin:offset+klon_omp_end,:) 210 boundslatfi(1:klon_omp,:)=boundslatfi_glo(offset+klon_omp_begin:offset+klon_omp_end,:) 211 212 ! copy over local grid longitudes and latitudes 213 CALL init_geometry(klon_omp,lonfi,latfi,boundslonfi,boundslatfi, & 214 airefi,cufi,cvfi) 171 215 172 216 ! copy some fundamental parameters to physics 173 217 ! and do some initializations 218 219 call init_dimphy(klon_omp,nlayer) ! Initialize dimphy module 174 220 call phys_state_var_init(klon_omp,nlayer,nqtot, & 175 221 day_ini,hour_ini,punjours,ptimestep, & 176 222 prad,pg,pr,pcpp) 177 call ini_fillgeom(klon_omp,lat itude,longitude,cell_area)223 call ini_fillgeom(klon_omp,latfi,lonfi,airefi) 178 224 call conf_phys(klon_omp,nlayer,nqtot) 179 225 -
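The rewritten iniphysiq above first builds the global lonfi_glo/latfi_glo/airefi_glo/bounds* arrays (with the poles collapsed to single columns) and then passes the local MPI/OpenMP chunk to init_geometry. A small standalone sketch of the dynamics-to-physics index mapping used in that loop, with a deliberately tiny, assumed grid (illustration only, not part of the changeset):

      program map_dyn_to_phy
        implicit none
        integer, parameter :: ii = 4, jj = 3            ! assumed iim, jjm (tiny, for clarity)
        integer, parameter :: klon_glo = ii*(jj-1) + 2  ! one physics column per pole
        integer :: i, j, k
        print *, 'column 1  -> north pole'
        do j = 2, jj
          do i = 1, ii
            k = (j-2)*ii + 1 + i                        ! same index as latfi_glo/lonfi_glo above
            print *, 'column', k, ' -> dynamics point (i,j) = (', i, ',', j, ')'
          end do
        end do
        print *, 'column', klon_glo, ' -> south pole'
      end program map_dyn_to_phy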
trunk/LMDZ.MARS/libf/dynphy_lonlat/phymars/newstart.F
r1524 r1543 15 15 c======================================================================= 16 16 17 use ioipsl_getincom, only: getin 17 use ioipsl_getincom, only: getin 18 use mod_phys_lmdz_para, only: is_parallel, is_sequential, 19 & is_mpi_root, is_omp_root, 20 & is_master 18 21 use infotrac, only: infotrac_init, nqtot, tname 19 22 use tracer_mod, only: noms, mmol, … … 29 32 use phyredem, only: physdem0, physdem1 30 33 use iostart, only: open_startphy 31 use comgeomphy, only: initcomgeomphy32 ! use planete_h33 34 use dimradmars_mod, only: tauscaling 34 35 use turb_mod, only: q2 35 use comgeomfi_h, only: ini_fillgeom36 36 use filtreg_mod, only: inifilr 37 USE mod_const_mpi, ONLY: COMM_LMDZ 37 38 USE comvert_mod, ONLY: ap,bp,pa,preff 38 39 USE comconst_mod, ONLY: lllm,daysec,dtphys,dtvr, … … 41 42 USE temps_mod, ONLY: day_ini,hour_ini 42 43 USE ener_mod, ONLY: etot0,ptot0,ztot0,stot0,ang0 43 USE phys_state_var_init_mod, ONLY: phys_state_var_init44 USE iniphysiq_mod, ONLY: iniphysiq 44 45 45 46 implicit none 46 47 47 #include "dimensions.h"48 include "dimensions.h" 48 49 integer, parameter :: ngridmx = (2+(jjm-1)*iim - 1/jjm) 49 #include "paramet.h"50 #include "comgeom2.h"51 #include "comdissnew.h"52 #include "clesph0.h"53 #include "netcdf.inc"54 #include "datafile.h"50 include "paramet.h" 51 include "comgeom2.h" 52 include "comdissnew.h" 53 include "clesph0.h" 54 include "netcdf.inc" 55 include "datafile.h" 55 56 c======================================================================= 56 57 c Declarations … … 181 182 planet_type="mars" 182 183 183 ! initialize "serial/parallel" related stuff 184 CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/)) 185 call initcomgeomphy 186 184 ! initialize "serial/parallel" related stuff: 185 ! (required because we call tabfi() below, before calling iniphysiq) 186 is_sequential=.true. 187 is_parallel=.false. 188 is_mpi_root=.true. 189 is_omp_root=.true. 190 is_master=.true. 191 187 192 ! Load tracer number and names: 188 ! call iniadvtrac(nqtot,numvanle)189 193 call infotrac_init 190 194 ! allocate arrays … … 335 339 mugaz = p_mugaz 336 340 daysec = p_daysec 337 ! write(*,*) 'aire',aire338 341 339 342 … … 349 352 idum=0 350 353 351 c Initialisation coordonnees /aires 352 c ------------------------------- 353 ! Note: rlatu(:) and rlonv(:) are commons defined in "comgeom.h" 354 ! rlatu() and rlonv() are given in radians 355 latfi(1)=rlatu(1) 356 lonfi(1)=0. 357 DO j=2,jjm 358 DO i=1,iim 359 latfi((j-2)*iim+1+i)=rlatu(j) 360 lonfi((j-2)*iim+1+i)=rlonv(i) 361 ENDDO 362 ENDDO 363 latfi(ngridmx)=rlatu(jjp1) 364 lonfi(ngridmx)=0. 365 366 ! build airefi(), mesh area on physics grid 367 CALL gr_dyn_fi(1,iip1,jjp1,ngridmx,aire,airefi) 368 ! Poles are single points on physics grid 369 airefi(1)=sum(aire(1:iim,1)) 370 airefi(ngridmx)=sum(aire(1:iim,jjm+1)) 371 372 ! also initialize various physics flags/settings which might be needed 373 ! (for instance initracer needs to know about some flags, and/or 374 ! 'datafile' path may be changed by user) 375 call phys_state_var_init(ngridmx,llm,nqtot, 376 & day_ini,hour_ini,daysec,dtphys, 377 & rad,g,r,cpp) 378 call ini_fillgeom(ngridmx,latfi,lonfi,airefi) 379 call conf_phys(ngridmx,llm,nqtot) 354 ! Initialize the physics 355 CALL iniphysiq(iim,jjm,llm, 356 & (jjm-1)*iim+2,comm_lmdz, 357 & daysec,day_ini,dtphys, 358 & rlatu,rlatv,rlonu,rlonv, 359 & aire,cu,cv,rad,g,r,cpp, 360 & 1) 380 361 381 362 c======================================================================= -
trunk/LMDZ.MARS/libf/dynphy_lonlat/phymars/start2archive.F
r1433 r1543 23 23 use surfdat_h, only: ini_surfdat_h, qsurf 24 24 use comsoil_h, only: ini_comsoil_h 25 use comgeomphy, only: initcomgeomphy25 ! use comgeomphy, only: initcomgeomphy 26 26 use filtreg_mod, only: inifilr 27 USE mod_const_mpi, ONLY: COMM_LMDZ 27 28 use control_mod, only: planet_type 28 29 USE comvert_mod, ONLY: ap,bp 29 USE comconst_mod, ONLY: g,cpp30 USE comconst_mod, ONLY: daysec,dtphys,rad,g,r,cpp 30 31 USE logic_mod, ONLY: grireg 31 32 USE temps_mod, ONLY: day_ini,hour_ini 33 USE iniphysiq_mod, ONLY: iniphysiq 32 34 implicit none 33 35 34 #include "dimensions.h"36 include "dimensions.h" 35 37 integer, parameter :: ngridmx = (2+(jjm-1)*iim - 1/jjm) 36 #include "paramet.h"37 #include "comdissip.h"38 #include "comgeom.h"39 #include "netcdf.inc"38 include "paramet.h" 39 include "comdissip.h" 40 include "comgeom.h" 41 include "netcdf.inc" 40 42 41 43 c----------------------------------------------------------------------- … … 59 61 c Variable Physiques (grille physique) 60 62 c ------------------------------------ 61 REAL tsurf(ngridmx) 63 REAL tsurf(ngridmx) ! Surface temperature 62 64 REAL tsoil(ngridmx,nsoilmx) ! Soil temperature 63 REAL co2ice(ngridmx) 65 REAL co2ice(ngridmx) ! CO2 ice layer 64 66 REAL tauscaling(ngridmx) ! dust conversion factor 65 67 REAL q2(ngridmx,llm+1) … … 116 118 CALL defrun_new(99, .TRUE. ) 117 119 grireg = .TRUE. 118 ! initialize "serial/parallel" related stuff 119 CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/)) 120 call initcomgeomphy 120 121 121 planet_type='mars' 122 122 … … 125 125 c======================================================================= 126 126 ! Load tracer number and names: 127 ! call iniadvtrac(nqtot,numvanle)128 127 call infotrac_init 129 128 … … 131 130 allocate(q(ip1jmp1,llm,nqtot)) 132 131 allocate(qsurfS(ip1jmp1,nqtot)) 133 call ini_surfdat_h(ngridmx,nqtot)134 call ini_comsoil_h(ngridmx)135 132 136 133 … … 139 136 . ps,phis,timedyn) 140 137 138 c----------------------------------------------------------------------- 139 c Initialisations 140 c----------------------------------------------------------------------- 141 142 CALL defrun_new(99, .FALSE. ) 143 call iniconst 144 call inigeom 145 call inifilr 146 147 ! Initialize the physics 148 CALL iniphysiq(iim,jjm,llm, 149 & (jjm-1)*iim+2,comm_lmdz, 150 & daysec,day_ini,dtphys, 151 & rlatu,rlatv,rlonu,rlonv, 152 & aire,cu,cv,rad,g,r,cpp, 153 & 1) 141 154 142 155 fichnom = 'startfi.nc' … … 185 198 c ***************************************************************** 186 199 187 c-----------------------------------------------------------------------188 c Initialisations189 c-----------------------------------------------------------------------190 191 CALL defrun_new(99, .FALSE. )192 call iniconst193 call inigeom194 call inifilr195 200 CALL pression(ip1jmp1, ap, bp, ps, p3d) 196 201 call exner_hyb(ip1jmp1, ps, p3d, beta, pks, pk, pkf) -
trunk/LMDZ.MARS/libf/phy_common/ioipsl_getin_p_mod.F90
r1528 r1543 12 12 USE mod_phys_lmdz_mpi_data, ONLY : is_mpi_root 13 13 USE mod_phys_lmdz_omp_data, ONLY : is_omp_root 14 use mod_phys_lmdz_para, only: bcast14 USE mod_phys_lmdz_transfert_para, ONLY : bcast 15 15 !- 16 16 IMPLICIT NONE -
trunk/LMDZ.MARS/libf/phy_common/mod_grid_phy_lmdz.F90
r1521 r1543 1 1 ! 2 !$ Header$2 !$Id $ 3 3 ! 4 4 MODULE mod_grid_phy_lmdz … … 7 7 PRIVATE :: grid1dTo2d_glo_igen, grid1dTo2d_glo_rgen, grid1dTo2d_glo_lgen, & 8 8 grid2dTo1d_glo_igen, grid2dTo1d_glo_rgen, grid2dTo1d_glo_lgen 9 9 10 INTEGER,PARAMETER :: unstructured=0 11 INTEGER,PARAMETER :: regular_lonlat=1 12 13 INTEGER,SAVE :: grid_type 14 INTEGER,SAVE :: nvertex 10 15 INTEGER,SAVE :: nbp_lon ! == iim 11 INTEGER,SAVE :: nbp_lat ! == jjmp1 16 INTEGER,SAVE :: nbp_lat ! == jjmp1 (or == 1 if running 1D model) 12 17 INTEGER,SAVE :: nbp_lev ! == llm 13 INTEGER,SAVE :: klon_glo 18 INTEGER,SAVE :: klon_glo ! total number of atmospheric columns 14 19 15 20 INTERFACE grid1dTo2d_glo … … 32 37 33 38 34 SUBROUTINE init_grid_phy_lmdz( iim,jjp1,llm)39 SUBROUTINE init_grid_phy_lmdz(grid_type_,nvertex_,nbp_lon_,nbp_lat_,nbp_lev_) 35 40 IMPLICIT NONE 36 INTEGER, INTENT(in) :: iim 37 INTEGER, INTENT(in) :: jjp1 38 INTEGER, INTENT(in) :: llm 39 40 nbp_lon=iim 41 nbp_lat=jjp1 42 nbp_lev=llm 43 klon_glo=(iim*jjp1)-2*(iim-1) 44 45 ! Ehouarn: handle 1D case: 46 if ((iim.eq.1).and.(jjp1.eq.2)) then 47 nbp_lat=1 48 klon_glo=1 49 endif 41 INTEGER,INTENT(IN) :: grid_type_ 42 INTEGER,INTENT(IN) :: nvertex_ 43 INTEGER, INTENT(IN) :: nbp_lon_ 44 INTEGER, INTENT(IN) :: nbp_lat_ 45 INTEGER, INTENT(IN) :: nbp_lev_ 46 47 grid_type = grid_type_ 48 nvertex = nvertex_ 49 nbp_lon = nbp_lon_ 50 nbp_lat = nbp_lat_ 51 nbp_lev = nbp_lev_ 52 53 IF (nbp_lon*nbp_lat==1) THEN 54 klon_glo=1 55 ELSE 56 klon_glo=(nbp_lon*nbp_lat)-2*(nbp_lon-1) 57 ENDIF 50 58 51 59 END SUBROUTINE init_grid_phy_lmdz … … 283 291 284 292 !---------------------------------------------------------------- 285 ! fonctions generiques (privees)293 ! Generic (private) fonctions 286 294 !---------------------------------------------------------------- 295 287 296 SUBROUTINE grid1dTo2d_glo_igen(VarIn,VarOut,dimsize) 297 288 298 IMPLICIT NONE 289 299 … … 320 330 321 331 SUBROUTINE grid1dTo2d_glo_rgen(VarIn,VarOut,dimsize) 332 322 333 IMPLICIT NONE 323 334 … … 353 364 354 365 SUBROUTINE grid1dTo2d_glo_lgen(VarIn,VarOut,dimsize) 366 355 367 IMPLICIT NONE 356 368 … … 386 398 387 399 SUBROUTINE grid2dTo1d_glo_igen(VarIn,VarOut,dimsize) 400 388 401 IMPLICIT NONE 389 402 … … 408 421 409 422 SUBROUTINE grid2dTo1d_glo_rgen(VarIn,VarOut,dimsize) 423 410 424 IMPLICIT NONE 411 425 … … 430 444 431 445 SUBROUTINE grid2dTo1d_glo_lgen(VarIn,VarOut,dimsize) 446 432 447 IMPLICIT NONE 433 448 -
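With this change init_grid_phy_lmdz no longer special-cases (iim=1, jjp1=2): the global column count is derived directly from nbp_lon and nbp_lat, falling back to a single column for the 1D model. An illustrative standalone check of that formula (grid sizes assumed; the helper name count_columns is hypothetical and stands in for the klon_glo computation above):

      program klon_glo_check
        implicit none
        print *, count_columns(64, 49)   ! regular lon-lat grid: 3136 - 126 = 3010
        print *, count_columns(1, 1)     ! 1D model: a single column
      contains
        integer function count_columns(nbp_lon, nbp_lat)
          integer, intent(in) :: nbp_lon, nbp_lat
          if (nbp_lon*nbp_lat == 1) then
            count_columns = 1
          else
            count_columns = (nbp_lon*nbp_lat) - 2*(nbp_lon-1)  ! same formula as above
          end if
        end function count_columns
      end program klon_glo_check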
trunk/LMDZ.MARS/libf/phy_common/mod_phys_lmdz_mpi_data.F90
r1521 r1543 3 3 ! 4 4 MODULE mod_phys_lmdz_mpi_data 5 USE mod_const_mpi, only: MPI_REAL_LMDZ, COMM_LMDZ 5 ! USE mod_const_mpi 6 6 7 7 INTEGER,SAVE :: ii_begin … … 35 35 INTEGER,SAVE :: mpi_rank 36 36 INTEGER,SAVE :: mpi_size 37 INTEGER,SAVE :: mpi_root 37 INTEGER,SAVE :: mpi_master 38 ! INTEGER,SAVE :: mpi_root 38 39 LOGICAL,SAVE :: is_mpi_root 39 40 LOGICAL,SAVE :: is_using_mpi … … 43 44 LOGICAL,SAVE :: is_south_pole 44 45 INTEGER,SAVE :: COMM_LMDZ_PHY 46 INTEGER,SAVE :: MPI_REAL_LMDZ ! MPI_REAL8 45 47 46 48 CONTAINS 47 49 48 SUBROUTINE Init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib) 49 USE mod_const_mpi, ONLY : COMM_LMDZ 50 ! SUBROUTINE Init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib) 51 SUBROUTINE init_phys_lmdz_mpi_data(nbp, nbp_lon, nbp_lat, communicator) 52 ! USE mod_const_mpi, ONLY : COMM_LMDZ 50 53 IMPLICIT NONE 51 INTEGER,INTENT(in) :: iim 52 INTEGER,INTENT(in) :: jjp1 53 INTEGER,INTENT(in) :: nb_proc 54 INTEGER,INTENT(in) :: distrib(0:nb_proc-1) 55 54 #ifdef CPP_MPI 55 INCLUDE 'mpif.h' 56 #endif 57 INTEGER,INTENT(in) :: nbp 58 INTEGER,INTENT(in) :: nbp_lon 59 INTEGER,INTENT(in) :: nbp_lat 60 INTEGER,INTENT(in) :: communicator 61 62 INTEGER,ALLOCATABLE :: distrib(:) 56 63 INTEGER :: ierr 57 64 INTEGER :: klon_glo … … 64 71 #endif 65 72 66 if ( iim.eq.1) then73 if ((nbp_lon.eq.1).and.(nbp_lat.eq.1)) then ! running 1D column model 67 74 klon_glo=1 68 75 else 69 klon_glo=iim*(jjp1-2)+2 76 ! The usual global physics grid: 1 point for each pole and nbp_lon points 77 ! for all other latitudes 78 klon_glo=nbp_lon*(nbp_lat-2)+2 70 79 endif 71 80 72 COMM_LMDZ_PHY= COMM_LMDZ81 COMM_LMDZ_PHY=communicator 73 82 74 83 IF (is_using_mpi) THEN 75 84 #ifdef CPP_MPI 85 MPI_REAL_LMDZ=MPI_REAL8 76 86 CALL MPI_COMM_SIZE(COMM_LMDZ_PHY,mpi_size,ierr) 77 87 CALL MPI_COMM_RANK(COMM_LMDZ_PHY,mpi_rank,ierr) … … 82 92 ENDIF 83 93 94 ALLOCATE(distrib(0:mpi_size-1)) 95 96 IF (is_using_mpi) THEN 97 #ifdef CPP_MPI 98 CALL MPI_ALLGATHER(nbp,1,MPI_INTEGER,distrib,1,MPI_INTEGER,COMM_LMDZ_PHY,ierr) 99 #endif 100 ELSE 101 distrib(:)=nbp 102 ENDIF 103 104 84 105 IF (mpi_rank == 0) THEN 85 mpi_ root= 0106 mpi_master = 0 86 107 is_mpi_root = .true. 87 108 ENDIF … … 115 136 116 137 117 klon_mpi_para_nb(0:mpi_size-1)=distrib(0: nb_proc-1)138 klon_mpi_para_nb(0:mpi_size-1)=distrib(0:mpi_size-1) 118 139 119 140 DO i=0,mpi_size-1 … … 132 153 ij_para_begin(i) = 1 133 154 ELSE 134 ij_para_begin(i) = klon_mpi_para_begin(i)+ iim-1155 ij_para_begin(i) = klon_mpi_para_begin(i)+nbp_lon-1 135 156 ENDIF 136 157 137 jj_para_begin(i) = (ij_para_begin(i)-1)/ iim+ 1138 ii_para_begin(i) = MOD(ij_para_begin(i)-1, iim) + 1158 jj_para_begin(i) = (ij_para_begin(i)-1)/nbp_lon + 1 159 ii_para_begin(i) = MOD(ij_para_begin(i)-1,nbp_lon) + 1 139 160 140 161 141 ij_para_end(i) = klon_mpi_para_end(i)+iim-1 142 jj_para_end(i) = (ij_para_end(i)-1)/iim + 1 143 ii_para_end(i) = MOD(ij_para_end(i)-1,iim) + 1 144 145 ! Ehouarn: handle 1D case: 146 if (klon_glo.eq.1) then 147 klon_mpi_para_end(i) = 1 148 klon_mpi_para_nb(i) = 1 149 ij_para_end(i) = 1 150 jj_para_end(i) = 1 151 ii_para_end(i) = 1 152 endif 162 ij_para_end(i) = klon_mpi_para_end(i)+nbp_lon-1 163 jj_para_end(i) = (ij_para_end(i)-1)/nbp_lon + 1 164 ii_para_end(i) = MOD(ij_para_end(i)-1,nbp_lon) + 1 165 153 166 154 167 ij_para_nb(i) = ij_para_end(i)-ij_para_begin(i)+1 … … 174 187 175 188 SUBROUTINE print_module_data 189 ! 
USE print_control_mod, ONLY: lunout 176 190 IMPLICIT NONE 177 !INCLUDE "iniprint.h"178 179 WRITE( *,*) 'ii_begin =', ii_begin180 WRITE( *,*) 'ii_end =', ii_end181 WRITE( *,*) 'jj_begin =',jj_begin182 WRITE( *,*) 'jj_end =', jj_end183 WRITE( *,*) 'jj_nb =', jj_nb184 WRITE( *,*) 'ij_begin =', ij_begin185 WRITE( *,*) 'ij_end =', ij_end186 WRITE( *,*) 'ij_nb =', ij_nb187 WRITE( *,*) 'klon_mpi_begin =', klon_mpi_begin188 WRITE( *,*) 'klon_mpi_end =', klon_mpi_end189 WRITE( *,*) 'klon_mpi =', klon_mpi190 WRITE( *,*) 'jj_para_nb =', jj_para_nb191 WRITE( *,*) 'jj_para_begin =', jj_para_begin192 WRITE( *,*) 'jj_para_end =', jj_para_end193 WRITE( *,*) 'ii_para_begin =', ii_para_begin194 WRITE( *,*) 'ii_para_end =', ii_para_end195 WRITE( *,*) 'ij_para_nb =', ij_para_nb196 WRITE( *,*) 'ij_para_begin =', ij_para_begin197 WRITE( *,*) 'ij_para_end =', ij_para_end198 WRITE( *,*) 'klon_mpi_para_nb =', klon_mpi_para_nb199 WRITE( *,*) 'klon_mpi_para_begin =', klon_mpi_para_begin200 WRITE( *,*) 'klon_mpi_para_end =', klon_mpi_para_end201 WRITE( *,*) 'mpi_rank =', mpi_rank202 WRITE( *,*) 'mpi_size =', mpi_size203 WRITE( *,*) 'mpi_root =', mpi_root204 WRITE( *,*) 'is_mpi_root =', is_mpi_root205 WRITE( *,*) 'is_north_pole =', is_north_pole206 WRITE( *,*) 'is_south_pole =', is_south_pole207 WRITE( *,*) 'COMM_LMDZ_PHY =', COMM_LMDZ_PHY191 INCLUDE "iniprint.h" 192 193 WRITE(lunout,*) 'ii_begin =', ii_begin 194 WRITE(lunout,*) 'ii_end =', ii_end 195 WRITE(lunout,*) 'jj_begin =',jj_begin 196 WRITE(lunout,*) 'jj_end =', jj_end 197 WRITE(lunout,*) 'jj_nb =', jj_nb 198 WRITE(lunout,*) 'ij_begin =', ij_begin 199 WRITE(lunout,*) 'ij_end =', ij_end 200 WRITE(lunout,*) 'ij_nb =', ij_nb 201 WRITE(lunout,*) 'klon_mpi_begin =', klon_mpi_begin 202 WRITE(lunout,*) 'klon_mpi_end =', klon_mpi_end 203 WRITE(lunout,*) 'klon_mpi =', klon_mpi 204 WRITE(lunout,*) 'jj_para_nb =', jj_para_nb 205 WRITE(lunout,*) 'jj_para_begin =', jj_para_begin 206 WRITE(lunout,*) 'jj_para_end =', jj_para_end 207 WRITE(lunout,*) 'ii_para_begin =', ii_para_begin 208 WRITE(lunout,*) 'ii_para_end =', ii_para_end 209 WRITE(lunout,*) 'ij_para_nb =', ij_para_nb 210 WRITE(lunout,*) 'ij_para_begin =', ij_para_begin 211 WRITE(lunout,*) 'ij_para_end =', ij_para_end 212 WRITE(lunout,*) 'klon_mpi_para_nb =', klon_mpi_para_nb 213 WRITE(lunout,*) 'klon_mpi_para_begin =', klon_mpi_para_begin 214 WRITE(lunout,*) 'klon_mpi_para_end =', klon_mpi_para_end 215 WRITE(lunout,*) 'mpi_rank =', mpi_rank 216 WRITE(lunout,*) 'mpi_size =', mpi_size 217 WRITE(lunout,*) 'mpi_master =', mpi_master 218 WRITE(lunout,*) 'is_mpi_root =', is_mpi_root 219 WRITE(lunout,*) 'is_north_pole =', is_north_pole 220 WRITE(lunout,*) 'is_south_pole =', is_south_pole 221 WRITE(lunout,*) 'COMM_LMDZ_PHY =', COMM_LMDZ_PHY 208 222 209 223 END SUBROUTINE print_module_data -
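The reworked init_phys_lmdz_mpi_data no longer receives a precomputed distrib(:) array: each process now passes its own number of columns (nbp) plus the communicator, and the distribution is rebuilt with MPI_ALLGATHER. A minimal standalone sketch of that collective (the per-process counts are assumed values; illustration only, not part of the changeset):

      program gather_distrib
        implicit none
        include 'mpif.h'
        integer :: ierr, rank, nproc, nbp
        integer, allocatable :: distrib(:)
        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        call MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)
        allocate(distrib(0:nproc-1))
        nbp = 100 + rank                            ! assumed per-process column count
        call MPI_ALLGATHER(nbp, 1, MPI_INTEGER, &   ! same collective as in the hunk above
                           distrib, 1, MPI_INTEGER, MPI_COMM_WORLD, ierr)
        if (rank == 0) print *, 'distrib =', distrib
        call MPI_FINALIZE(ierr)
      end program gather_distrib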
trunk/LMDZ.MARS/libf/phy_common/mod_phys_lmdz_mpi_transfert.F90
r1521 r1543 9 9 bcast_mpi_i,bcast_mpi_i1,bcast_mpi_i2,bcast_mpi_i3,bcast_mpi_i4, & 10 10 bcast_mpi_r,bcast_mpi_r1,bcast_mpi_r2,bcast_mpi_r3,bcast_mpi_r4, & 11 11 bcast_mpi_l,bcast_mpi_l1,bcast_mpi_l2,bcast_mpi_l3,bcast_mpi_l4 12 12 END INTERFACE 13 13 … … 15 15 MODULE PROCEDURE scatter_mpi_i,scatter_mpi_i1,scatter_mpi_i2,scatter_mpi_i3, & 16 16 scatter_mpi_r,scatter_mpi_r1,scatter_mpi_r2,scatter_mpi_r3, & 17 17 scatter_mpi_l,scatter_mpi_l1,scatter_mpi_l2,scatter_mpi_l3 18 18 END INTERFACE 19 19 … … 22 22 MODULE PROCEDURE gather_mpi_i,gather_mpi_i1,gather_mpi_i2,gather_mpi_i3, & 23 23 gather_mpi_r,gather_mpi_r1,gather_mpi_r2,gather_mpi_r3, & 24 24 gather_mpi_l,gather_mpi_l1,gather_mpi_l2,gather_mpi_l3 25 25 END INTERFACE 26 26 … … 28 28 MODULE PROCEDURE scatter2D_mpi_i,scatter2D_mpi_i1,scatter2D_mpi_i2,scatter2D_mpi_i3, & 29 29 scatter2D_mpi_r,scatter2D_mpi_r1,scatter2D_mpi_r2,scatter2D_mpi_r3, & 30 30 scatter2D_mpi_l,scatter2D_mpi_l1,scatter2D_mpi_l2,scatter2D_mpi_l3 31 31 END INTERFACE 32 32 … … 34 34 MODULE PROCEDURE gather2D_mpi_i,gather2D_mpi_i1,gather2D_mpi_i2,gather2D_mpi_i3, & 35 35 gather2D_mpi_r,gather2D_mpi_r1,gather2D_mpi_r2,gather2D_mpi_r3, & 36 36 gather2D_mpi_l,gather2D_mpi_l1,gather2D_mpi_l2,gather2D_mpi_l3 37 37 END INTERFACE 38 38 … … 45 45 MODULE PROCEDURE grid1dTo2d_mpi_i,grid1dTo2d_mpi_i1,grid1dTo2d_mpi_i2,grid1dTo2d_mpi_i3, & 46 46 grid1dTo2d_mpi_r,grid1dTo2d_mpi_r1,grid1dTo2d_mpi_r2,grid1dTo2d_mpi_r3, & 47 47 grid1dTo2d_mpi_l,grid1dTo2d_mpi_l1,grid1dTo2d_mpi_l2,grid1dTo2d_mpi_l3 48 48 END INTERFACE 49 49 … … 51 51 MODULE PROCEDURE grid2dTo1d_mpi_i,grid2dTo1d_mpi_i1,grid2dTo1d_mpi_i2,grid2dTo1d_mpi_i3, & 52 52 grid2dTo1d_mpi_r,grid2dTo1d_mpi_r1,grid2dTo1d_mpi_r2,grid2dTo1d_mpi_r3, & 53 53 grid2dTo1d_mpi_l,grid2dTo1d_mpi_l1,grid2dTo1d_mpi_l2,grid2dTo1d_mpi_l3 54 54 END INTERFACE 55 55 … … 514 514 515 515 SUBROUTINE scatter2D_mpi_i(VarIn, VarOut) 516 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo516 USE mod_grid_phy_lmdz 517 517 IMPLICIT NONE 518 518 … … 528 528 529 529 SUBROUTINE scatter2D_mpi_i1(VarIn, VarOut) 530 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo530 USE mod_grid_phy_lmdz 531 531 IMPLICIT NONE 532 532 … … 541 541 542 542 SUBROUTINE scatter2D_mpi_i2(VarIn, VarOut) 543 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo543 USE mod_grid_phy_lmdz 544 544 IMPLICIT NONE 545 545 … … 555 555 556 556 SUBROUTINE scatter2D_mpi_i3(VarIn, VarOut) 557 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo557 USE mod_grid_phy_lmdz 558 558 IMPLICIT NONE 559 559 … … 570 570 571 571 SUBROUTINE scatter2D_mpi_r(VarIn, VarOut) 572 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo572 USE mod_grid_phy_lmdz 573 573 IMPLICIT NONE 574 574 … … 585 585 586 586 SUBROUTINE scatter2D_mpi_r1(VarIn, VarOut) 587 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo587 USE mod_grid_phy_lmdz 588 588 IMPLICIT NONE 589 589 REAL,INTENT(IN),DIMENSION(:,:,:) :: VarIn … … 599 599 600 600 SUBROUTINE scatter2D_mpi_r2(VarIn, VarOut) 601 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo601 USE mod_grid_phy_lmdz 602 602 IMPLICIT NONE 603 603 … … 613 613 614 614 SUBROUTINE scatter2D_mpi_r3(VarIn, VarOut) 615 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo615 USE mod_grid_phy_lmdz 616 616 IMPLICIT NONE 617 617 … … 628 628 629 629 SUBROUTINE scatter2D_mpi_l(VarIn, VarOut) 630 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo630 USE mod_grid_phy_lmdz 631 631 IMPLICIT NONE 632 632 … … 643 643 644 644 SUBROUTINE scatter2D_mpi_l1(VarIn, VarOut) 645 USE 
mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo645 USE mod_grid_phy_lmdz 646 646 IMPLICIT NONE 647 647 … … 658 658 659 659 SUBROUTINE scatter2D_mpi_l2(VarIn, VarOut) 660 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo660 USE mod_grid_phy_lmdz 661 661 IMPLICIT NONE 662 662 … … 672 672 673 673 SUBROUTINE scatter2D_mpi_l3(VarIn, VarOut) 674 USE mod_grid_phy_lmdz , only: klon_glo, grid2dTo1d_glo674 USE mod_grid_phy_lmdz 675 675 IMPLICIT NONE 676 676 … … 691 691 692 692 SUBROUTINE gather2D_mpi_i(VarIn, VarOut) 693 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo693 USE mod_grid_phy_lmdz 694 694 IMPLICIT NONE 695 695 … … 705 705 706 706 SUBROUTINE gather2D_mpi_i1(VarIn, VarOut) 707 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo707 USE mod_grid_phy_lmdz 708 708 IMPLICIT NONE 709 709 … … 719 719 720 720 SUBROUTINE gather2D_mpi_i2(VarIn, VarOut) 721 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo721 USE mod_grid_phy_lmdz 722 722 IMPLICIT NONE 723 723 … … 733 733 734 734 SUBROUTINE gather2D_mpi_i3(VarIn, VarOut) 735 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo735 USE mod_grid_phy_lmdz 736 736 IMPLICIT NONE 737 737 … … 749 749 750 750 SUBROUTINE gather2D_mpi_r(VarIn, VarOut) 751 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo751 USE mod_grid_phy_lmdz 752 752 IMPLICIT NONE 753 753 … … 763 763 764 764 SUBROUTINE gather2D_mpi_r1(VarIn, VarOut) 765 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo765 USE mod_grid_phy_lmdz 766 766 IMPLICIT NONE 767 767 … … 777 777 778 778 SUBROUTINE gather2D_mpi_r2(VarIn, VarOut) 779 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo779 USE mod_grid_phy_lmdz 780 780 IMPLICIT NONE 781 781 … … 791 791 792 792 SUBROUTINE gather2D_mpi_r3(VarIn, VarOut) 793 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo793 USE mod_grid_phy_lmdz 794 794 IMPLICIT NONE 795 795 … … 807 807 808 808 SUBROUTINE gather2D_mpi_l(VarIn, VarOut) 809 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo809 USE mod_grid_phy_lmdz 810 810 IMPLICIT NONE 811 811 … … 821 821 822 822 SUBROUTINE gather2D_mpi_l1(VarIn, VarOut) 823 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo823 USE mod_grid_phy_lmdz 824 824 IMPLICIT NONE 825 825 … … 835 835 836 836 SUBROUTINE gather2D_mpi_l2(VarIn, VarOut) 837 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo837 USE mod_grid_phy_lmdz 838 838 IMPLICIT NONE 839 839 … … 849 849 850 850 SUBROUTINE gather2D_mpi_l3(VarIn, VarOut) 851 USE mod_grid_phy_lmdz , only: klon_glo, grid1dTo2d_glo851 USE mod_grid_phy_lmdz 852 852 IMPLICIT NONE 853 853 … … 1236 1236 1237 1237 SUBROUTINE bcast_mpi_cgen(var,nb) 1238 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1238 USE mod_phys_lmdz_mpi_data 1239 1239 IMPLICIT NONE 1240 1240 … … 1250 1250 1251 1251 #ifdef CPP_MPI 1252 CALL MPI_BCAST(Var,nb,MPI_CHARACTER,mpi_ root_x,COMM_LMDZ_PHY,ierr)1252 CALL MPI_BCAST(Var,nb,MPI_CHARACTER,mpi_master,COMM_LMDZ_PHY,ierr) 1253 1253 #endif 1254 1254 … … 1258 1258 1259 1259 SUBROUTINE bcast_mpi_igen(var,nb) 1260 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1261 IMPLICIT NONE 1262 1260 USE mod_phys_lmdz_mpi_data 1261 IMPLICIT NONE 1262 1263 INTEGER,INTENT(IN) :: nb 1263 1264 INTEGER,DIMENSION(nb),INTENT(INOUT) :: Var 1264 INTEGER,INTENT(IN) :: nb1265 1265 1266 1266 #ifdef CPP_MPI … … 1272 1272 1273 1273 #ifdef CPP_MPI 1274 CALL MPI_BCAST(Var,nb,MPI_INTEGER,mpi_ root_x,COMM_LMDZ_PHY,ierr)1274 CALL MPI_BCAST(Var,nb,MPI_INTEGER,mpi_master,COMM_LMDZ_PHY,ierr) 1275 1275 #endif 1276 1276 … … 1281 1281 1282 1282 SUBROUTINE 
bcast_mpi_rgen(var,nb) 1283 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1284 IMPLICIT NONE 1285 1283 USE mod_phys_lmdz_mpi_data 1284 IMPLICIT NONE 1285 1286 INTEGER,INTENT(IN) :: nb 1286 1287 REAL,DIMENSION(nb),INTENT(INOUT) :: Var 1288 1289 #ifdef CPP_MPI 1290 INCLUDE 'mpif.h' 1291 #endif 1292 INTEGER :: ierr 1293 1294 IF (.not.is_using_mpi) RETURN 1295 1296 #ifdef CPP_MPI 1297 CALL MPI_BCAST(Var,nb,MPI_REAL_LMDZ,mpi_master,COMM_LMDZ_PHY,ierr) 1298 #endif 1299 1300 END SUBROUTINE bcast_mpi_rgen 1301 1302 1303 1304 1305 SUBROUTINE bcast_mpi_lgen(var,nb) 1306 USE mod_phys_lmdz_mpi_data 1307 IMPLICIT NONE 1308 1287 1309 INTEGER,INTENT(IN) :: nb 1310 LOGICAL,DIMENSION(nb),INTENT(INOUT) :: Var 1288 1311 1289 1312 #ifdef CPP_MPI … … 1295 1318 1296 1319 #ifdef CPP_MPI 1297 CALL MPI_BCAST(Var,nb,MPI_REAL_LMDZ,mpi_root_x,COMM_LMDZ_PHY,ierr) 1298 #endif 1299 1300 END SUBROUTINE bcast_mpi_rgen 1301 1302 1303 1304 1305 SUBROUTINE bcast_mpi_lgen(var,nb) 1306 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root 1307 IMPLICIT NONE 1308 1309 LOGICAL,DIMENSION(nb),INTENT(INOUT) :: Var 1310 INTEGER,INTENT(IN) :: nb 1311 1312 #ifdef CPP_MPI 1313 INCLUDE 'mpif.h' 1314 #endif 1315 INTEGER :: ierr 1316 1317 IF (.not.is_using_mpi) RETURN 1318 1319 #ifdef CPP_MPI 1320 CALL MPI_BCAST(Var,nb,MPI_LOGICAL,mpi_root_x,COMM_LMDZ_PHY,ierr) 1321 if (ierr.ne.MPI_SUCCESS) then 1322 write(*,*) "bcast_mpi error: ierr=",ierr 1323 stop 1324 endif 1320 CALL MPI_BCAST(Var,nb,MPI_LOGICAL,mpi_master,COMM_LMDZ_PHY,ierr) 1325 1321 #endif 1326 1322 … … 1330 1326 1331 1327 SUBROUTINE scatter_mpi_igen(VarIn, VarOut, dimsize) 1332 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1328 USE mod_phys_lmdz_mpi_data 1333 1329 USE mod_grid_phy_lmdz 1334 1330 IMPLICIT NONE … … 1369 1365 #ifdef CPP_MPI 1370 1366 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_INTEGER,VarOut,klon_mpi*dimsize, & 1371 MPI_INTEGER,mpi_ root_x, COMM_LMDZ_PHY,ierr)1367 MPI_INTEGER,mpi_master, COMM_LMDZ_PHY,ierr) 1372 1368 #endif 1373 1369 … … 1375 1371 1376 1372 SUBROUTINE scatter_mpi_rgen(VarIn, VarOut, dimsize) 1377 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1373 USE mod_phys_lmdz_mpi_data 1378 1374 USE mod_grid_phy_lmdz 1379 1375 IMPLICIT NONE … … 1413 1409 #ifdef CPP_MPI 1414 1410 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_REAL_LMDZ,VarOut,klon_mpi*dimsize, & 1415 MPI_REAL_LMDZ,mpi_ root_x, COMM_LMDZ_PHY,ierr)1411 MPI_REAL_LMDZ,mpi_master, COMM_LMDZ_PHY,ierr) 1416 1412 1417 1413 #endif … … 1421 1417 1422 1418 SUBROUTINE scatter_mpi_lgen(VarIn, VarOut, dimsize) 1423 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1419 USE mod_phys_lmdz_mpi_data 1424 1420 USE mod_grid_phy_lmdz 1425 1421 IMPLICIT NONE … … 1459 1455 #ifdef CPP_MPI 1460 1456 CALL MPI_SCATTERV(VarTmp,counts,displs,MPI_LOGICAL,VarOut,klon_mpi*dimsize, & 1461 MPI_LOGICAL,mpi_ root_x, COMM_LMDZ_PHY,ierr)1457 MPI_LOGICAL,mpi_master, COMM_LMDZ_PHY,ierr) 1462 1458 #endif 1463 1459 … … 1468 1464 1469 1465 SUBROUTINE gather_mpi_igen(VarIn, VarOut, dimsize) 1470 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1466 USE mod_phys_lmdz_mpi_data 1471 1467 USE mod_grid_phy_lmdz 1472 1468 IMPLICIT NONE … … 1497 1493 displs(rank)=Index-1 1498 1494 counts(rank)=nb*dimsize 1499 1495 Index=Index+nb*dimsize 1500 1496 ENDDO 1501 1497 … … 1504 1500 #ifdef CPP_MPI 1505 1501 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_INTEGER,VarTmp,counts,displs, & 1506 MPI_INTEGER,mpi_ root_x, COMM_LMDZ_PHY,ierr)1507 #endif 1508 1509 1502 MPI_INTEGER,mpi_master, COMM_LMDZ_PHY,ierr) 1503 #endif 1504 1505 1510 1506 IF (is_mpi_root) THEN 
1511 1507 Index=1 … … 1514 1510 DO i=1,dimsize 1515 1511 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1516 1512 Index=Index+nb 1517 1513 ENDDO 1518 1514 ENDDO … … 1522 1518 1523 1519 SUBROUTINE gather_mpi_rgen(VarIn, VarOut, dimsize) 1524 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1520 USE mod_phys_lmdz_mpi_data 1525 1521 USE mod_grid_phy_lmdz 1526 1522 IMPLICIT NONE … … 1546 1542 displs(rank)=Index-1 1547 1543 counts(rank)=nb*dimsize 1548 1544 Index=Index+nb*dimsize 1549 1545 ENDDO 1550 1546 ENDIF … … 1557 1553 #ifdef CPP_MPI 1558 1554 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_REAL_LMDZ,VarTmp,counts,displs, & 1559 MPI_REAL_LMDZ,mpi_ root_x, COMM_LMDZ_PHY,ierr)1560 #endif 1561 1555 MPI_REAL_LMDZ,mpi_master, COMM_LMDZ_PHY,ierr) 1556 #endif 1557 1562 1558 IF (is_mpi_root) THEN 1563 1559 Index=1 … … 1566 1562 DO i=1,dimsize 1567 1563 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1568 1564 Index=Index+nb 1569 1565 ENDDO 1570 1566 ENDDO … … 1574 1570 1575 1571 SUBROUTINE gather_mpi_lgen(VarIn, VarOut, dimsize) 1576 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1572 USE mod_phys_lmdz_mpi_data 1577 1573 USE mod_grid_phy_lmdz 1578 1574 IMPLICIT NONE … … 1603 1599 displs(rank)=Index-1 1604 1600 counts(rank)=nb*dimsize 1605 1601 Index=Index+nb*dimsize 1606 1602 ENDDO 1607 1603 ENDIF … … 1610 1606 #ifdef CPP_MPI 1611 1607 CALL MPI_GATHERV(VarIn,klon_mpi*dimsize,MPI_LOGICAL,VarTmp,counts,displs, & 1612 MPI_LOGICAL,mpi_ root_x, COMM_LMDZ_PHY,ierr)1613 #endif 1614 1608 MPI_LOGICAL,mpi_master, COMM_LMDZ_PHY,ierr) 1609 #endif 1610 1615 1611 IF (is_mpi_root) THEN 1616 1612 Index=1 … … 1619 1615 DO i=1,dimsize 1620 1616 VarOut(klon_mpi_para_begin(rank):klon_mpi_para_end(rank),i)=VarTmp(Index:Index+nb-1) 1621 1617 Index=Index+nb 1622 1618 ENDDO 1623 1619 ENDDO … … 1629 1625 1630 1626 SUBROUTINE reduce_sum_mpi_igen(VarIn,VarOut,nb) 1631 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1627 USE mod_phys_lmdz_mpi_data 1632 1628 USE mod_grid_phy_lmdz 1633 1629 IMPLICIT NONE … … 1637 1633 #endif 1638 1634 1635 INTEGER,INTENT(IN) :: nb 1639 1636 INTEGER,DIMENSION(nb),INTENT(IN) :: VarIn 1640 1637 INTEGER,DIMENSION(nb),INTENT(OUT) :: VarOut 1641 INTEGER,INTENT(IN) :: nb1642 1638 INTEGER :: ierr 1643 1639 … … 1649 1645 1650 1646 #ifdef CPP_MPI 1651 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_INTEGER,MPI_SUM,mpi_ root_x,COMM_LMDZ_PHY,ierr)1647 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_INTEGER,MPI_SUM,mpi_master,COMM_LMDZ_PHY,ierr) 1652 1648 #endif 1653 1649 … … 1655 1651 1656 1652 SUBROUTINE reduce_sum_mpi_rgen(VarIn,VarOut,nb) 1657 USE mod_phys_lmdz_mpi_data , mpi_root_x=>mpi_root1653 USE mod_phys_lmdz_mpi_data 1658 1654 USE mod_grid_phy_lmdz 1659 1655 … … 1664 1660 #endif 1665 1661 1662 INTEGER,INTENT(IN) :: nb 1666 1663 REAL,DIMENSION(nb),INTENT(IN) :: VarIn 1667 1664 REAL,DIMENSION(nb),INTENT(OUT) :: VarOut 1668 INTEGER,INTENT(IN) :: nb1669 1665 INTEGER :: ierr 1670 1666 … … 1675 1671 1676 1672 #ifdef CPP_MPI 1677 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_REAL_LMDZ,MPI_SUM,mpi_ root_x,COMM_LMDZ_PHY,ierr)1673 CALL MPI_REDUCE(VarIn,VarOut,nb,MPI_REAL_LMDZ,MPI_SUM,mpi_master,COMM_LMDZ_PHY,ierr) 1678 1674 #endif 1679 1675 … … 1711 1707 DO ij=1,nbp_lon 1712 1708 VarOut(ij,i)=VarIn(1,i) 1713 1709 ENDDO 1714 1710 ENDDO 1715 1711 ENDIF … … 1719 1715 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1720 1716 VarOut(ij,i)=VarIn(klon_mpi,i) 1721 1717 ENDDO 1722 1718 ENDDO 1723 1719 ENDIF … … 1755 1751 DO ij=1,nbp_lon 1756 1752 VarOut(ij,i)=VarIn(1,i) 1757 1753 
ENDDO 1758 1754 ENDDO 1759 1755 ENDIF … … 1763 1759 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1764 1760 VarOut(ij,i)=VarIn(klon_mpi,i) 1765 1761 ENDDO 1766 1762 ENDDO 1767 1763 ENDIF … … 1800 1796 DO ij=1,nbp_lon 1801 1797 VarOut(ij,i)=VarIn(1,i) 1802 1798 ENDDO 1803 1799 ENDDO 1804 1800 ENDIF … … 1808 1804 DO ij=nbp_lon*(jj_nb-1)+1,nbp_lon*jj_nb 1809 1805 VarOut(ij,i)=VarIn(klon_mpi,i) 1810 1806 ENDDO 1811 1807 ENDDO 1812 1808 ENDIF … … 1905 1901 1906 1902 END MODULE mod_phys_lmdz_mpi_transfert 1903 -
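Throughout this module the bcast/scatter/gather wrappers now use mpi_master (replacing the renamed-away mpi_root) as the root of the MPI collectives, with MPI_REAL_LMDZ set to MPI_REAL8 at initialization. A minimal standalone sketch of the underlying broadcast pattern (the broadcast value is assumed; illustration only, not part of the changeset):

      program bcast_from_master
        implicit none
        include 'mpif.h'
        integer, parameter :: mpi_master = 0
        integer :: ierr, rank
        real(kind=8) :: var
        call MPI_INIT(ierr)
        call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
        var = 0.d0
        if (rank == mpi_master) var = 42.d0        ! assumed value held by the master rank
        call MPI_BCAST(var, 1, MPI_REAL8, mpi_master, MPI_COMM_WORLD, ierr)
        print *, 'rank', rank, 'received', var
        call MPI_FINALIZE(ierr)
      end program bcast_from_master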
trunk/LMDZ.MARS/libf/phy_common/mod_phys_lmdz_omp_data.F90
r1521 r1543 1 1 ! 2 !$Id: mod_phys_lmdz_omp_data.F90 1575 2011-09-21 13:57:48Z jghattas$2 !$Id: mod_phys_lmdz_omp_data.F90 2326 2015-07-10 12:24:29Z emillour $ 3 3 ! 4 4 MODULE mod_phys_lmdz_omp_data … … 91 91 SUBROUTINE Print_module_data 92 92 IMPLICIT NONE 93 !INCLUDE "iniprint.h"93 INCLUDE "iniprint.h" 94 94 95 95 !$OMP CRITICAL 96 WRITE( *,*)'--------> TASK ',omp_rank97 WRITE( *,*)'omp_size =',omp_size98 WRITE( *,*)'omp_rank =',omp_rank99 WRITE( *,*)'is_omp_root =',is_omp_root100 WRITE( *,*)'klon_omp_para_nb =',klon_omp_para_nb101 WRITE( *,*)'klon_omp_para_begin =',klon_omp_para_begin102 WRITE( *,*)'klon_omp_para_end =',klon_omp_para_end103 WRITE( *,*)'klon_omp =',klon_omp104 WRITE( *,*)'klon_omp_begin =',klon_omp_begin105 WRITE( *,*)'klon_omp_end =',klon_omp_end96 WRITE(lunout,*)'--------> TASK ',omp_rank 97 WRITE(lunout,*)'omp_size =',omp_size 98 WRITE(lunout,*)'omp_rank =',omp_rank 99 WRITE(lunout,*)'is_omp_root =',is_omp_root 100 WRITE(lunout,*)'klon_omp_para_nb =',klon_omp_para_nb 101 WRITE(lunout,*)'klon_omp_para_begin =',klon_omp_para_begin 102 WRITE(lunout,*)'klon_omp_para_end =',klon_omp_para_end 103 WRITE(lunout,*)'klon_omp =',klon_omp 104 WRITE(lunout,*)'klon_omp_begin =',klon_omp_begin 105 WRITE(lunout,*)'klon_omp_end =',klon_omp_end 106 106 !$OMP END CRITICAL 107 107 -
trunk/LMDZ.MARS/libf/phy_common/mod_phys_lmdz_omp_transfert.F90
r1521 r1543 25 25 bcast_omp_i,bcast_omp_i1,bcast_omp_i2,bcast_omp_i3,bcast_omp_i4, & 26 26 bcast_omp_r,bcast_omp_r1,bcast_omp_r2,bcast_omp_r3,bcast_omp_r4, & 27 27 bcast_omp_l,bcast_omp_l1,bcast_omp_l2,bcast_omp_l3,bcast_omp_l4 28 28 END INTERFACE 29 29 … … 31 31 MODULE PROCEDURE scatter_omp_i,scatter_omp_i1,scatter_omp_i2,scatter_omp_i3, & 32 32 scatter_omp_r,scatter_omp_r1,scatter_omp_r2,scatter_omp_r3, & 33 33 scatter_omp_l,scatter_omp_l1,scatter_omp_l2,scatter_omp_l3 34 34 END INTERFACE 35 35 … … 38 38 MODULE PROCEDURE gather_omp_i,gather_omp_i1,gather_omp_i2,gather_omp_i3, & 39 39 gather_omp_r,gather_omp_r1,gather_omp_r2,gather_omp_r3, & 40 40 gather_omp_l,gather_omp_l1,gather_omp_l2,gather_omp_l3 41 41 END INTERFACE 42 42 … … 48 48 49 49 50 PUBLIC bcast_omp,scatter_omp,gather_omp,reduce_sum_omp 50 PUBLIC bcast_omp,scatter_omp,gather_omp,reduce_sum_omp, omp_barrier 51 51 52 52 CONTAINS 53 53 54 SUBROUTINE omp_barrier 55 IMPLICIT NONE 56 57 !$OMP BARRIER 58 59 END SUBROUTINE omp_barrier 60 54 61 SUBROUTINE check_buffer_i(buff_size) 55 62 IMPLICIT NONE … … 733 740 IMPLICIT NONE 734 741 742 INTEGER,INTENT(IN) :: Nb 735 743 INTEGER,DIMENSION(Nb),INTENT(INOUT) :: Var 736 744 INTEGER,DIMENSION(Nb),INTENT(INOUT) :: Buff 737 INTEGER,INTENT(IN) :: Nb738 745 739 746 INTEGER :: i … … 757 764 IMPLICIT NONE 758 765 766 INTEGER,INTENT(IN) :: Nb 759 767 REAL,DIMENSION(Nb),INTENT(INOUT) :: Var 760 768 REAL,DIMENSION(Nb),INTENT(INOUT) :: Buff 761 INTEGER,INTENT(IN) :: Nb762 769 763 770 INTEGER :: i … … 780 787 IMPLICIT NONE 781 788 789 INTEGER,INTENT(IN) :: Nb 782 790 LOGICAL,DIMENSION(Nb),INTENT(INOUT) :: Var 783 791 LOGICAL,DIMENSION(Nb),INTENT(INOUT) :: Buff 784 INTEGER,INTENT(IN) :: Nb785 792 786 793 INTEGER :: i … … 802 809 803 810 SUBROUTINE scatter_omp_igen(VarIn,VarOut,dimsize,Buff) 804 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin811 USE mod_phys_lmdz_omp_data 805 812 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 806 813 IMPLICIT NONE … … 833 840 834 841 SUBROUTINE scatter_omp_rgen(VarIn,VarOut,dimsize,Buff) 835 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin842 USE mod_phys_lmdz_omp_data 836 843 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 837 844 IMPLICIT NONE … … 864 871 865 872 SUBROUTINE scatter_omp_lgen(VarIn,VarOut,dimsize,Buff) 866 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin873 USE mod_phys_lmdz_omp_data 867 874 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 868 875 IMPLICIT NONE … … 898 905 899 906 SUBROUTINE gather_omp_igen(VarIn,VarOut,dimsize,Buff) 900 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin907 USE mod_phys_lmdz_omp_data 901 908 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 902 909 IMPLICIT NONE … … 930 937 931 938 SUBROUTINE gather_omp_rgen(VarIn,VarOut,dimsize,Buff) 932 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin939 USE mod_phys_lmdz_omp_data 933 940 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 934 941 IMPLICIT NONE … … 962 969 963 970 SUBROUTINE gather_omp_lgen(VarIn,VarOut,dimsize,Buff) 964 USE mod_phys_lmdz_omp_data , only: klon_omp, klon_omp_begin971 USE mod_phys_lmdz_omp_data 965 972 USE mod_phys_lmdz_mpi_data, ONLY : klon_mpi 966 973 IMPLICIT NONE -
trunk/LMDZ.MARS/libf/phy_common/mod_phys_lmdz_para.F90
r1521 r1543 16 16 CONTAINS 17 17 18 SUBROUTINE Init_phys_lmdz_para( iim,jjp1,nb_proc,distrib)18 SUBROUTINE Init_phys_lmdz_para(nbp,nbp_lon,nbp_lat,communicator) 19 19 IMPLICIT NONE 20 INTEGER,INTENT(in) :: iim21 INTEGER,INTENT(in) :: jjp122 INTEGER,INTENT(in) :: nb _proc23 INTEGER,INTENT(in) :: distrib(0:nb_proc-1)20 INTEGER,INTENT(in) :: nbp 21 INTEGER,INTENT(in) :: nbp_lon 22 INTEGER,INTENT(in) :: nbp_lat 23 INTEGER,INTENT(in) :: communicator 24 24 25 CALL Init_phys_lmdz_mpi_data( iim,jjp1,nb_proc,distrib)25 CALL Init_phys_lmdz_mpi_data(nbp,nbp_lon,nbp_lat,communicator) 26 26 !$OMP PARALLEL 27 27 CALL Init_phys_lmdz_omp_data(klon_mpi) … … 45 45 46 46 SUBROUTINE Test_transfert 47 USE mod_grid_phy_lmdz, only: klon_glo, nbp_lev, nbp_lon, nbp_lat, & 48 grid1dTo2d_glo, grid2dTo1d_glo 47 USE mod_grid_phy_lmdz 49 48 IMPLICIT NONE 50 !INCLUDE "iniprint.h"49 INCLUDE "iniprint.h" 51 50 52 51 REAL :: Test_Field1d_glo(klon_glo,nbp_lev) … … 82 81 !$OMP MASTER 83 82 Checksum=sum(Test_Field1d_glo-tmp1d_glo) 84 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"83 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 85 84 !$OMP END MASTER 86 85 ENDIF … … 94 93 !$OMP MASTER 95 94 Checksum=sum(Test_Field1d_glo-tmp1d_glo) 96 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"95 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 97 96 !$OMP END MASTER 98 97 ENDIF … … 104 103 !$OMP MASTER 105 104 Checksum=sum(Test_Field1d_glo*omp_size*mpi_size-tmp1d_glo) 106 WRITE( *,*) "------> Checksum =",Checksum," MUST BE 0"105 WRITE(lunout,*) "------> Checksum =",Checksum," MUST BE 0" 107 106 !$OMP END MASTER 108 107 ENDIF -
trunk/LMDZ.MARS/libf/phymars/aeropacity.F
r1541 r1543 8 8 & igcm_dust_submicron, rho_dust, rho_ice, 9 9 & nqdust 10 use comgeomphy, only: latitude ! grid point latitudes (rad)10 use geometry_mod, only: latitude ! grid point latitudes (rad) 11 11 use comgeomfi_h, only: sinlat ! sines of grid point latitudes 12 12 #ifdef DUSTSTORM 13 use comgeomphy, only: longitude13 use geometry_mod, only: longitude 14 14 use tracer_mod, only: r3n_q, ref_r0, igcm_dust_number 15 15 #endif -
trunk/LMDZ.MARS/libf/phymars/albedocaps.F90
r1541 r1543 6 6 ! to use the 'getin' routine 7 7 use ioipsl_getincom, only: getin 8 use comgeomphy, only: latitude ! grid point latitudes (rad)8 use geometry_mod, only: latitude ! grid point latitudes (rad) 9 9 use surfdat_h, only: TESicealbedo, TESice_Ncoef, TESice_Scoef, & 10 10 emisice, albedice, watercaptag, albedo_h2o_ice, & … … 87 87 subroutine TES_icecap_albedo(zls,ig,alb,icap) 88 88 89 use comgeomphy, only: latitude, longitude ! in radians89 use geometry_mod, only: latitude, longitude ! in radians 90 90 use surfdat_h, only: albedice, TESice_Ncoef, TESice_Scoef 91 91 use netcdf, only: nf90_open, NF90_NOWRITE, NF90_NOERR, & -
trunk/LMDZ.MARS/libf/phymars/co2snow.F
r1541 r1543 3 3 4 4 use surfdat_h, only: iceradius, dtemisice 5 use comgeomphy, only: latitude ! grid point latitudes (rad)5 use geometry_mod, only: latitude ! grid point latitudes (rad) 6 6 use time_phylmdz_mod, only: daysec 7 7 IMPLICIT NONE -
trunk/LMDZ.MARS/libf/phymars/dyn1d/testphys1d.F
r1541 r1543 3 3 ! to use 'getin' 4 4 USE ioipsl_getincom, only: getin 5 use dimphy, only : init_dimphy 6 use mod_grid_phy_lmdz, only : regular_lonlat 5 7 use infotrac, only: nqtot, tname 6 8 use comsoil_h, only: volcapa, layer, mlayer, inertiedat, nsoilmx … … 12 14 use slope_mod, only: theta_sl, psi_sl 13 15 use phyredem, only: physdem0,physdem1 14 use comgeomphy, only: initcomgeomphy, cell_area16 use geometry_mod, only: init_geometry 15 17 use planete_h, only: year_day, periheli, aphelie, peri_day, 16 18 & obliquit, emin_turb, lmixmin … … 21 23 USE comvert_mod, ONLY: ap,bp,aps,bps,pa,preff,sig 22 24 USE logic_mod, ONLY: hybrid 25 use physics_distribution_mod, only: init_physics_distribution 23 26 use regular_lonlat_mod, only: init_regular_lonlat 27 use mod_interface_dyn_phys, only: init_interface_dyn_phys 24 28 USE phys_state_var_init_mod, ONLY: phys_state_var_init 25 29 IMPLICIT NONE … … 106 110 Logical tracerdyn 107 111 integer :: nq=1 ! number of tracers 108 real :: latitude(1), longitude(1) 112 real :: latitude(1), longitude(1), cell_area(1) 109 113 110 114 character*2 str2 … … 119 123 ! initialize "serial/parallel" related stuff 120 124 ! CALL init_phys_lmdz(iim,jjp1,llm,1,(/(jjm-1)*iim+2/)) 121 CALL init_phys_lmdz(1,1,llm,1,(/1/))122 call initcomgeomphy125 ! CALL init_phys_lmdz(1,1,llm,1,(/1/)) 126 ! call initcomgeomphy 123 127 124 128 c ------------------------------------------------------ … … 483 487 !Mars possible matter with dtphys in input and include!!! 484 488 ! Initializations below should mimick what is done in iniphysiq for 3D GCM 489 call init_physics_distribution(regular_lonlat,4, 490 & 1,1,1,nlayer,1) 491 call init_interface_dyn_phys 485 492 call init_regular_lonlat(1,1,longitude,latitude, 486 493 & (/0.,0./),(/0.,0./)) 494 call init_geometry(1,longitude,latitude, 495 & (/0.,0.,0.,0./),(/0.,0.,0.,0./), 496 & cell_area) 497 call init_dimphy(1,nlayer) ! Initialize dimphy module 487 498 call phys_state_var_init(1,llm,nq, 488 499 . day0,time,daysec,dtphys,rad,g,r,cpp) -
trunk/LMDZ.MARS/libf/phymars/eofdump_mod.F90
r1541 r1543 80 80 subroutine ineofdump(ngrid,nlayer) 81 81 82 use comgeomphy, only: longitude, latitude82 use geometry_mod, only: longitude, latitude 83 83 use comcstfi_h, only: pi 84 84 use time_phylmdz_mod, only: daysec, dtphys -
trunk/LMDZ.MARS/libf/phymars/getslopes.F90
r1541 r1543 1 1 subroutine getslopes(ngrid,geopot) 2 2 3 use comgeomphy, only: longitude, latitude ! in radians3 use geometry_mod, only: longitude, latitude ! in radians 4 4 use slope_mod, only: theta_sl, psi_sl 5 5 use comcstfi_h, only: g, rad, pi -
trunk/LMDZ.MARS/libf/phymars/newcondens.F
r1541 r1543 8 8 use tracer_mod, only: noms 9 9 use surfdat_h, only: emissiv, phisfi 10 use comgeomphy, only: latitude ! grid point latitudes (rad)10 use geometry_mod, only: latitude ! grid point latitudes (rad) 11 11 use planete_h 12 12 USE comcstfi_h -
trunk/LMDZ.MARS/libf/phymars/phyredem.F90
r1541 r1543 14 14 use infotrac, only: nqtot, tname 15 15 use comsoil_h, only: inertiedat, volcapa, mlayer 16 use comgeomphy, only: cell_area16 use geometry_mod, only: cell_area 17 17 use surfdat_h, only: zmea, zstd, zsig, zgam, zthe, & 18 18 z0_default, albedice, emisice, emissiv, & -
trunk/LMDZ.MARS/libf/phymars/physiq.F
r1541 r1543 15 15 use comsoil_h, only: inertiedat, ! soil thermal inertia 16 16 & tsoil, nsoilmx ! number of subsurface layers 17 use comgeomphy, only: longitude, latitude, cell_area17 use geometry_mod, only: longitude, latitude, cell_area 18 18 use comgeomfi_h, only: sinlon, coslon, sinlat, coslat 19 19 use surfdat_h, only: phisfi, albedodat, zmea, zstd, zsig, zgam, -
trunk/LMDZ.MARS/libf/phymars/read_dust_scenario.F90
r1541 r1543 4 4 5 5 use netcdf 6 use comgeomphy, only: latitude, longitude ! in radians6 use geometry_mod, only: latitude, longitude ! in radians 7 7 implicit none 8 8 -
trunk/LMDZ.MARS/libf/phymars/surfini.F
r1541 r1543 4 4 use netcdf 5 5 use tracer_mod, only: nqmx, noms 6 use comgeomphy, only: longitude, latitude ! in radians6 use geometry_mod, only: longitude, latitude ! in radians 7 7 use surfdat_h, only: watercaptag, frost_albedo_threshold, 8 8 & albedo_h2o_ice, inert_h2o_ice, albedodat, -
trunk/LMDZ.MARS/libf/phymars/tabfi.F
r1524 r1543 56 56 implicit none 57 57 58 #include "netcdf.inc"58 include "netcdf.inc" 59 59 60 60 c----------------------------------------------------------------------- … … 148 148 c Read 'controle' array 149 149 c 150 ! ierr = NF_INQ_VARID (nid, "controle", nvarid)151 ! IF (ierr .NE. NF_NOERR) THEN152 ! PRINT*, "Tabfi: Could not find <controle> data"153 ! CALL abort154 ! ENDIF155 !#ifdef NC_DOUBLE156 ! ierr = NF_GET_VAR_DOUBLE(nid, nvarid, tab_cntrl)157 !#else158 ! ierr = NF_GET_VAR_REAL(nid, nvarid, tab_cntrl)159 !#endif160 ! IF (ierr .NE. NF_NOERR) THEN161 ! PRINT*, "Tabfi: Failed reading <controle> array"162 ! CALL abort163 ! ENDIF164 165 150 call get_var("controle",tab_cntrl,found) 166 151 if (.not.found) then … … 173 158 c Initialization of some physical constants 174 159 c informations on physics grid 175 ! if(ngridmx.ne.tab_cntrl(tab0+1)) then176 ! print*,'tabfi: WARNING !!! tab_cntrl(tab0+1).ne.ngridmx'177 ! print*,tab_cntrl(tab0+1),ngridmx178 ! endif179 160 lmax = nint(tab_cntrl(tab0+2)) 180 161 day_ini = tab_cntrl(tab0+3) -
trunk/LMDZ.MARS/libf/phymars/writediagfi.F
r1541 r1543 40 40 !================================================================= 41 41 use surfdat_h, only: phisfi 42 use comgeomphy, only: cell_area42 use geometry_mod, only: cell_area 43 43 use time_phylmdz_mod, only: ecritphy, day_step, iphysiq, day_ini 44 44 USE mod_phys_lmdz_para, only : is_parallel, is_mpi_root, -
trunk/LMDZ.MARS/libf/phymars/writediagsoil.F90
r1541 r1543 13 13 14 14 use comsoil_h, only: nsoilmx, inertiedat 15 use comgeomphy, only: cell_area15 use geometry_mod, only: cell_area 16 16 use time_phylmdz_mod, only: ecritphy, day_step, iphysiq 17 17 use mod_phys_lmdz_para, only : is_mpi_root, is_master, gather -
trunk/LMDZ.TITAN/libf/phytitan/calchim.F
r1530 r1543 17 17 use common_mod, only:utilaer,maer,prodaer,csn,csh,psurfhaze, 18 18 . NLEV,NLRT,NC,ND,NR 19 USE comgeomphy, only: rlatd20 19 USE moyzon_mod, only: tmoy,playmoy,zlaymoy,zlevmoy,klat 21 20 use mod_grid_phy_lmdz, only: nbp_lat -
trunk/LMDZ.TITAN/libf/phytitan/muphys3D.F
r1056 r1543 65 65 use dimphy 66 66 c use radcommon_h, only : volume,rayon,vrat,drayon,dvolume 67 USE comgeomphy, only: rlatd67 USE geometry_mod, only: latitude 68 68 69 69 IMPLICIT NONE … … 235 235 c*************************************************************** 236 236 if((IHOR.eq.1) 237 & .or.( rlatd(IHOR).ne.rlatd(im1))237 & .or.(latitude(IHOR).ne.latitude(im1)) 238 238 & .or.(microfi.eq.2)) then 239 239 c*************************************************************** -
trunk/LMDZ.TITAN/libf/phytitan/pg2.old
r175 r1543 42 42 43 43 use dimphy 44 USE comgeomphy44 USE geometry_mod, ONLY: latitude 45 45 #include "dimensions.h" 46 46 #include "microtab.h" … … 180 180 lati(1) = 0.5*RPI 181 181 DO ig=2,ngrid-1 182 lati(ig) = rlatd(2+(ig-2)*iim)*RPI/180.182 lati(ig) = latitude(2+(ig-2)*iim)*RPI/180. 183 183 ENDDO 184 184 lati(ngrid) = -0.5*RPI … … 197 197 c print*,"ENTREE PG2 PREMIER APPEL" 198 198 c print*,airetot,' airetot?= ',4.*RPI*RA*RA 199 c print*,1, rlatd(1),aire(1),aire(1)/airetot,' aires'199 c print*,1,latitude(1),aire(1),aire(1)/airetot,' aires' 200 200 c DO ig=2,ngrid-1 201 c print*,ig, rlatd(2+(ig-2)*iim),aire(ig),aire(ig)/airetot,' aires'201 c print*,ig,latitude(2+(ig-2)*iim),aire(ig),aire(ig)/airetot,' aires' 202 202 c ENDDO 203 c print*,ngrid, rlatd(klon),aire(ngrid),aire(ngrid)/airetot,' aires'203 c print*,ngrid,latitude(klon),aire(ngrid),aire(ngrid)/airetot,' aires' 204 204 c stop 205 205 -
trunk/LMDZ.TITAN/libf/phytitan/phyetat0.F90
r1530 r1543 13 13 USE iostart 14 14 USE infotrac 15 USE comgeomphy, only: rlatd,rlond15 USE geometry_mod, only: latitude,longitude 16 16 USE time_phylmdz_mod, only: itau_phy, raz_date 17 17 … … 76 76 77 77 ! read latitudes 78 call get_field("latitude", rlatd,found)78 call get_field("latitude",latitude,found) 79 79 IF (.not.found) THEN 80 80 PRINT*, 'phyetat0: Le champ <latitude> est absent' … … 83 83 84 84 ! read longitudes 85 call get_field("longitude", rlond,found)85 call get_field("longitude",longitude,found) 86 86 IF (.not.found) THEN 87 87 PRINT*, 'phyetat0: Le champ <longitude> est absent' … … 319 319 resch4(1) = 0. ! pole nord = 1 point 320 320 DO i=2,klon 321 if (( rlatd(i).ge.75..and.rlatd(i).le.85.).or. &322 ( rlatd(i).ge.-85.and.rlatd(i).le.-75.)) then321 if ((latitude(i).ge.75..and.latitude(i).le.85.).or. & 322 (latitude(i).ge.-85.and.latitude(i).le.-75.)) then 323 323 resch4(i) = 2. 324 324 else … … 352 352 353 353 ! do some more initializations 354 call init_iophy_new( rlatd,rlond)354 call init_iophy_new(latitude,longitude) 355 355 356 356 end subroutine phyetat0 -
trunk/LMDZ.TITAN/libf/phytitan/phyredem.F90
r1530 r1543 12 12 put_var,put_field 13 13 USE infotrac 14 USE comgeomphy, only: rlatd,rlond14 USE geometry_mod, only: longitude, latitude 15 15 USE time_phylmdz_mod, only: day_end, annee_ref, itau_phy, raz_date 16 16 … … 64 64 65 65 CALL put_field("longitude", & 66 "Longitudes de la grille physique", rlond)66 "Longitudes de la grille physique",longitude) 67 67 68 CALL put_field("latitude","Latitudes de la grille physique",rlatd) 68 CALL put_field("latitude", & 69 "Latitudes de la grille physique",latitude) 69 70 70 71 ! variables -
trunk/LMDZ.TITAN/libf/phytitan/physiq.F
r1530 r1543
      USE infotrac
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, ONLY: longitude, latitude, cell_area, dx, dy
      use cpdet_mod, only: cpdet, t2tpot
      USE mod_phys_lmdz_para, only : is_parallel,jj_nb
…
      IF (if_ebil.ge.1) THEN
        ztit='after dynamic'
-       CALL diagetpq(airephy,ztit,ip_ebil,1,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,1,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
…
C     est egale a la variation de la physique au pas de temps precedent.
C     Donc la somme de ces 2 variations devrait etre nulle.
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, zero_v
     e    , zero_v, zero_v, zero_v, ztsol
…
      DO k = 1, klev
        DO i = 1, klon
-         omega(i,k) = RG*flxmw(i,k) / airephy(i)
+         omega(i,k) = RG*flxmw(i,k) / cell_area(i)
        END DO
      END DO
…

      DO i=2,klon
-       if (rlatd(i).ne.rlatd(i-1)) then
+       if (latitude(i).ne.latitude(i-1)) then
          DO l=1,klev
c           zzlaybar(i,l)=(zphibar(i,l)+zphisbar(i))/RG
…

c     dans zenang, Ls en degres ; dans mucorr, Ls en radians
-     call mucorr(klon,zls,rlatd,rmu0bar,fractbar)
+     call mucorr(klon,zls,latitude,rmu0bar,fractbar)
      IF (cycle_diurne) THEN
        zdtime=dtime*REAL(radpas) ! pas de temps du rayonnement (s)
-       CALL zenang(zlsdeg,gmtime,zdtime,rlatd,rlond,rmu0,fract)
+       CALL zenang(zlsdeg,gmtime,zdtime,latitude,longitude,rmu0,fract)
      ELSE
        rmu0 = rmu0bar
…
     $    paprs,pplay,ppk,radsol,falbe,
     e    solsw, sollw, sollwdown, fder,
-    e    rlond, rlatd, cuphy, cvphy,
+    e    longitude, latitude, dx, dy,
     e    debut, lafin,
     s    d_t_vdf,d_u_vdf,d_v_vdf,d_ts,
…
      IF (if_ebil.ge.2) THEN
        ztit='after clmain'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
      IF (if_ebil.ge.2) THEN
        ztit='after dry_adjust'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,2,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,2,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
      IF (if_ebil.ge.2) THEN
        ztit='after rad'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,2,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,2,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , topsw, toplw, solsw, sollw, zero_v
     e    , zero_v, zero_v, zero_v, ztsol
…
c     A ADAPTER POUR VENUS ET TITAN!!!
c     CALL lift_noro(klon,klev,dtime,paprs,pplay,
-c    e    rlatd,zmea,zstd,zpic,zgam,zthe,zpic,zval,
+c    e    latitude,zmea,zstd,zpic,zgam,zthe,zpic,zval,
c    e    igwd,idx,itest,
c    e    t_seri, u_seri, v_seri,
…
c====================================================================
      if (ballons.eq.1) then
-       CALL ballon(30,pdtphys,rjourvrai,gmtime,rlatd,rlond,
+       CALL ballon(30,pdtphys,rjourvrai,gmtime,latitude,longitude,
c    C      t,pplay,u,v,pphi) ! alt above surface (smoothed for GCM)
     C      t,pplay,u,v,zphi) ! alt above planet average radius
…
      CALL aaam_bud (27,klon,klev,rjourvrai,gmtime,
     C    ra,rg,romega,
-    C    rlatd,rlond,pphis,
+    C    latitude,longitude,pphis,
     C    zustrdr,zustrli,zustrcl,
     C    zvstrdr,zvstrli,zvstrcl,
…
      IF (if_ebil.ge.1) THEN
        ztit='after physic'
-       CALL diagetpq(airephy,ztit,ip_ebil,1,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,1,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
…
C     est egale a la variation de la physique au pas de temps precedent.
C     Donc la somme de ces 2 variations devrait etre nulle.
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , topsw, toplw, solsw, sollw, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
c     DO k = 1, klev
c       DO i = 1, klon
-c         mang(i,k) = RA*cos(rlatd(i)*RPI/180.)
-c    .       *(u_seri(i,k)+RA*cos(rlatd(i)*RPI/180.)*ROMEGA)
-c    .       *airephy(i)*(paprs(i,k)-paprs(i,k+1))/RG
+c         mang(i,k) = RA*cos(latitude(i)*RPI/180.)
+c    .       *(u_seri(i,k)+RA*cos(latitude(i)*RPI/180.)*ROMEGA)
+c    .       *cell_area(i)*(paprs(i,k)-paprs(i,k+1))/RG
c         mangtot=mangtot+mang(i,k)
c       ENDDO
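Throughout physiq.F the energy-budget diagnostics (diagetpq, diagphy) and the omega conversion now receive cell_area where they previously received airephy. Purely as a hedged illustration (this is not the diagetpq/diagphy code, and area_weighted_mean_sketch is a made-up name), an area-weighted mean of a per-column field with the renamed array looks like:

    ! Hedged illustration (not the actual diagetpq/diagphy routines): an
    ! area-weighted global mean of a per-column field using cell_area.
    SUBROUTINE area_weighted_mean_sketch(klon, cell_area, field, fmean)
      IMPLICIT NONE
      INTEGER, INTENT(IN) :: klon
      REAL, INTENT(IN)  :: cell_area(klon)   ! cell areas (from the geometry module)
      REAL, INTENT(IN)  :: field(klon)       ! any per-column quantity
      REAL, INTENT(OUT) :: fmean             ! its area-weighted mean
      fmean = SUM(field(1:klon)*cell_area(1:klon)) / SUM(cell_area(1:klon))
    END SUBROUTINE area_weighted_mean_sketch
-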
trunk/LMDZ.TITAN/libf/phytitan/radlwsw.F
r1530 r1543
c======================================================================
      use dimphy
-     USE comgeomphy
      USE phys_state_var_mod, only: falbe,heat,cool,radsol,
     .    topsw,toplw,solsw,sollw,sollwdown,lwnet,swnet,
-
trunk/LMDZ.TITAN/libf/phytitan/radtitan.F
r1461 r1543
      USE infotrac
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, ONLY: latitude
      USE optcld, only : iniqcld
      use moyzon_mod, only:plevmoy
…
      somcoslat=0.
      do j=1,klon
-       coslat(j) = cos(rlatd(j)*RPI/180.)
+       coslat(j) = cos(latitude(j)*RPI/180.)
        somcoslat=somcoslat+coslat(j)
      enddo
-
trunk/LMDZ.TITAN/libf/phytitan/write_histday.h
r1056 r1543

      call histwrite_phy(nid_day,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_day,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_day,.false.,"aire",itau_w,cell_area)

ccccccc axe Ls ... Faudrait le reduire a axe temporel seulement...
-
trunk/LMDZ.TITAN/libf/phytitan/write_histins.h
r1056 r1543

      call histwrite_phy(nid_ins,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_ins,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_ins,.false.,"aire",itau_w,cell_area)

ccccccc axe Ls ... Faudrait le reduire a axe temporel seulement...
-
trunk/LMDZ.TITAN/libf/phytitan/write_histmth.h
r1356 r1543

      call histwrite_phy(nid_mth,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_mth,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_mth,.false.,"aire",itau_w,cell_area)

ccccccc axe Ls ... Faudrait le reduire a axe temporel seulement...
-
trunk/LMDZ.VENUS/libf/phyvenus/clmain.F
r1530 r1543
     .    paprs,pplay,ppk,radsol,albe,
     .    solsw, sollw, sollwdown, fder,
-    .    rlon, rlat, cufi, cvfi,
+    .    rlon, rlat, dx, dy,
     .    debut, lafin,
     .    d_t,d_u,d_v,d_ts,
…
c     radsol---input-R- flux radiatif net (positif vers le sol) en W/m**2
c     rlat-----input-R- latitude en degree
-c    cufi-----input-R- resolution des mailles en x (m)
-c    cvfi-----input-R- resolution des mailles en y (m)
+c    dx-----input-R- resolution des mailles en x (m)
+c    dy-----input-R- resolution des mailles en y (m)
c
c     d_t------output-R- le changement pour "t"
…
!     ADAPTATION GCM POUR CP(T)
      real ppk(klon,klev)
-     REAL rlon(klon), rlat(klon), cufi(klon), cvfi(klon)
+     REAL rlon(klon), rlat(klon), dx(klon), dy(klon)
      REAL d_t(klon, klev)
      REAL d_u(klon, klev), d_v(klon, klev)
…
!     ADAPTATION GCM POUR CP(T)
      CALL clqh(dtime, itap, debut,lafin,
-    e    rlon, rlat, cufi, cvfi,
+    e    rlon, rlat, dx, dy,
     e    knon,
     e    soil_model, ytsoil,
…

      SUBROUTINE clqh(dtime,itime, debut,lafin,
-    e    rlon, rlat, cufi, cvfi,
+    e    rlon, rlat, dx, dy,
     e    knon,
     $    soil_model,tsoil,
…
      REAL albedo(klon) ! albedo de la surface
      real rmu0(klon)   ! cosinus de l'angle solaire zenithal
-     real rlon(klon), rlat(klon), cufi(klon), cvfi(klon)
+     real rlon(klon), rlat(klon), dx(klon), dy(klon)
c
      REAL d_t(klon,klev) ! incrementation de "t"
…
      CALL interfsurf_hq(itime, dtime, rmu0,
     e    klon, nbp_lon, nbp_lat-1, knon,
-    e    rlon, rlat, cufi, cvfi,
+    e    rlon, rlat, dx, dy,
     e    debut, lafin, soil_model, nsoilmx,tsoil,
     e    zlev1, u1lay, v1lay, temp_air, epot_air,
-
trunk/LMDZ.VENUS/libf/phyvenus/dyn1d/rcm1d.F
r1525 r1543
      USE infotrac
      use control_mod, only: planet_type, day_step
-     use comgeomphy
      USE phys_state_var_mod
      use chemparam_mod
…
      use conc, only: rho
      USE iniphysiq_mod, ONLY: iniphysiq
+     USE mod_const_mpi, ONLY: comm_lmdz
      IMPLICIT NONE
…
c     ---------------------------------

-     CALL init_phys_lmdz(iim,jjm,llm,1,(/1/))
+!    CALL init_phys_lmdz(iim,jjm,llm,1,(/1/))

c     la surface de chaque maille est inutile en 1D --->
…
c     e.g. for cell boundaries, which are meaningless in 1D; so pad these
c     with '0.' when necessary
-     CALL iniphysiq(1,1,llm,daysec,day0,dtphys,
+     CALL iniphysiq(1,1,llm,
+    &               1,comm_lmdz,
+    &               daysec,day0,dtphys,
     &               (/lati(1),0./),(/0./),
     &               (/0.,0./),(/long(1),0./),
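The 1-D driver now hands the physics its horizontal size and an MPI communicator (comm_lmdz, from mod_const_mpi) directly at initialization instead of calling init_phys_lmdz first. The sketch below only mimics that calling pattern; init_physics_sketch is a made-up stand-in, and the real iniphysiq argument list is longer than what is reproduced here.

    ! Hypothetical sketch of the single-column initialization pattern above;
    ! init_physics_sketch is a stand-in, not the real iniphysiq interface.
    PROGRAM onecolumn_init_sketch
      IMPLICIT NONE
      INTEGER, PARAMETER :: llm = 50   ! arbitrary number of vertical levels
      INTEGER :: comm_lmdz
      comm_lmdz = 0                    ! placeholder for the communicator from mod_const_mpi
      ! a 1 x 1 horizontal grid (a single physics column) plus the communicator:
      CALL init_physics_sketch(1, 1, llm, 1, comm_lmdz)
    CONTAINS
      SUBROUTINE init_physics_sketch(ii, jj, nlev, ngrid, comm)
        INTEGER, INTENT(IN) :: ii, jj, nlev, ngrid, comm
        PRINT *, 'physics init:', ii, 'x', jj, 'grid,', ngrid, 'column(s),', &
                 nlev, 'levels, comm =', comm
      END SUBROUTINE init_physics_sketch
    END PROGRAM onecolumn_init_sketch
-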
trunk/LMDZ.VENUS/libf/phyvenus/new_cloud_sedim.F
r1530 r1543
      USE infotrac
      USE dimphy
-     USE comgeomphy
      USE chemparam_mod
      IMPLICIT NONE
-
trunk/LMDZ.VENUS/libf/phyvenus/nirco2abs.F
r1530 r1543

      use dimphy
-     use comgeomphy, only: rlatd, rlond
+     use geometry_mod, only: longitude, latitude
      use chemparam_mod, only: i_co2, i_o
c     use compo_hedin83_mod2
…
      zday_int = (n-1)/float(nstep)

-     CALL zenang(0.,zday_int,RDAY/nstep,rlatd,rlond,
+     CALL zenang(0.,zday_int,RDAY/nstep,latitude,longitude,
     s            mu0_int,fract_int)

-
trunk/LMDZ.VENUS/libf/phyvenus/phyetat0.F90
r1530 r1543
  USE iostart
  USE infotrac
- USE comgeomphy, only: rlatd,rlond
+ use geometry_mod, only: longitude, latitude
  USE time_phylmdz_mod, only: itau_phy, raz_date
…
  ! read latitudes
- call get_field("latitude",rlatd,found)
+ call get_field("latitude",latitude,found)
  IF (.not.found) THEN
    PRINT*, 'phyetat0: Le champ <latitude> est absent'
…
  ! read longitudes
- call get_field("longitude",rlond,found)
+ call get_field("longitude",longitude,found)
  IF (.not.found) THEN
    PRINT*, 'phyetat0: Le champ <longitude> est absent'
…
  ! do some more initializations
- call init_iophy_new(rlatd,rlond)
+ call init_iophy_new(latitude,longitude)

  end subroutine phyetat0
-
trunk/LMDZ.VENUS/libf/phyvenus/phyredem.F90
r1530 r1543
               put_var,put_field
  USE infotrac
- USE comgeomphy, only: rlatd,rlond
+ use geometry_mod, only: longitude, latitude
  USE time_phylmdz_mod, only: day_end, annee_ref, itau_phy, raz_date
…
  CALL put_field("longitude", &
-                "Longitudes de la grille physique",rlond)
+                "Longitudes de la grille physique",longitude)

- CALL put_field("latitude","Latitudes de la grille physique",rlatd)
+ CALL put_field("latitude", &
+                "Latitudes de la grille physique",latitude)

  ! variables
-
trunk/LMDZ.VENUS/libf/phyvenus/physiq.F
r1530 r1543
      USE infotrac
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, only: longitude, latitude, cell_area, dx, dy
      USE mod_phys_lmdz_para, only : is_parallel,jj_nb
      USE phys_state_var_mod ! Variables sauvegardees de la physique
…
      DO k = 1, klev
        DO i = 1, klon
-         ilat=(rlatd(i)/5.625) + 17.
+         ilat=(latitude(i)/5.625) + 17.
          delta_temp(i,k)=mat_dtemp(INT(ilat),k)
        ENDDO
…
      IF (if_ebil.ge.1) THEN
        ztit='after dynamic'
-       CALL diagetpq(airephy,ztit,ip_ebil,1,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,1,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
…
C     est egale a la variation de la physique au pas de temps precedent.
C     Donc la somme de ces 2 variations devrait etre nulle.
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, zero_v
     e    , zero_v, zero_v, zero_v, ztsol
…
      DO k = 1, klev
        DO i = 1, klon
-         omega(i,k) = RG*flxmw(i,k) / airephy(i)
+         omega(i,k) = RG*flxmw(i,k) / cell_area(i)
        END DO
      END DO
…
      IF (cycle_diurne) THEN
        zdtime=dtime*REAL(radpas) ! pas de temps du rayonnement (s)
-       CALL zenang(zlongi,gmtime,zdtime,rlatd,rlond,rmu0,fract)
+       CALL zenang(zlongi,gmtime,zdtime,latitude,longitude,rmu0,fract)
      ELSE
-       call mucorr(klon,zlongi,rlatd,rmu0,fract)
+       call mucorr(klon,zlongi,latitude,rmu0,fract)
      ENDIF
…
     I    debut,lafin,nqmax,
     I    nlon,nlev,dtime,paprs,
-    I    rlatd,rlond,
+    I    latitude,longitude,
     O    tr_seri)
…
      DO k = 1, klev
        DO i = 1, klon
-         ilat=(rlatd(i)/5.625) + 17.
-         ! PRINT*,INT(ilat),rlatd(i),mat_dtemp(INT(ilat),k)
+         ilat=(latitude(i)/5.625) + 17.
+         ! PRINT*,INT(ilat),latitude(i),mat_dtemp(INT(ilat),k)
          delta_temp(i,k)=mat_dtemp(INT(ilat),k)
        ENDDO
…
     I    nqmax,
     I    klon,
-    I    rlatd,
-    I    rlond,
+    I    latitude,
+    I    longitude,
     I    nlev,
     I    dtime,
…
     I    nqmax,
     I    klon,
-    I    rlatd,
-    I    rlond,
+    I    latitude,
+    I    longitude,
     I    nlev,
     I    dtime,
…
      CALL new_cloud_sedim(
     I    klon,
     I    nlev,
     I    dtime,
     I    pplay,
     I    paprs,
     I    t_seri+delta_temp,
     I    tr_seri,
     O    d_tr_sed,
     O    d_tr_ssed,
     I    nqmax,
     O    Fsedim)
      else
…
      CALL new_cloud_sedim(
     I    klon,
     I    nlev,
     I    dtime,
     I    pplay,
     I    paprs,
     I    t_seri,
     I    tr_seri,
     O    d_tr_sed,
     O    d_tr_ssed,
     I    nqmax,
     O    Fsedim)

…
     $    paprs,pplay,ppk,radsol,falbe,
     e    solsw, sollw, sollwdown, fder,
-    e    rlond, rlatd, cuphy, cvphy,
+    e    longitude, latitude, dx, dy,
     e    debut, lafin,
     s    d_t_vdf,d_u_vdf,d_v_vdf,d_ts,
…
      IF (if_ebil.ge.2) THEN
        ztit='after clmain'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
      IF (if_ebil.ge.2) THEN
        ztit='after dry_adjust'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,2,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,2,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , zero_v, zero_v, zero_v, zero_v, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
      IF(callnlte.or.callthermos) THEN
        call compo_hedin83_mod(pplay,rmu0,
     &       co2vmr_gcm,covmr_gcm,ovmr_gcm,n2vmr_gcm,nvmr_gcm)

        IF(ok_chem) then
…
      ENDIF

      ENDIF

c
…
      d_t_nlte(:,:)=0.

      ENDIF

c     Find number of layers for LTE radiation calculations
…
     &     cool, d_t_nirco2,d_t_nlte, dtsw, dtlw)
      ELSE
        dtsw(:,:)=heat(:,:)
        dtlw(:,:)=-1*cool(:,:)
      ENDIF

      DO k=1,klev
…
     $    rmu0,pdtphys,gmtime,rjourvrai,
     $    tr_seri, d_tr, d_t_euv )

      DO k=1,klev
        DO ig=1,klon
…
      IF (if_ebil.ge.2) THEN
        ztit='after rad'
-       CALL diagetpq(airephy,ztit,ip_ebil,2,2,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,2,2,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , topsw, toplw, solsw, sollw, zero_v
     e    , zero_v, zero_v, zero_v, ztsol
…
      do i=1,klon
        do k=2,klev
          ztlev(i,k) = (t_seri(i,k)+t_seri(i,k-1))/2.
          zpklev(i,k) = sqrt(ppk(i,k)*ppk(i,k-1))
        enddo
      enddo
      call t2tpot(klon*klev,ztlev, ztetalev,zpklev)
…
      do i=1,klon
        do k=2,klev
          zdtetalev(i,k) = ztetalay(i,k)-ztetalay(i,k-1)
          zdzlev(i,k) = (zphi(i,k)-zphi(i,k-1))/RG
          zn2(i,k) = RG*zdtetalev(i,k)/(ztetalev(i,k)*zdzlev(i,k))
          zn2(i,k) = max(zn2(i,k),1.e-12) ! securite
        enddo
        zn2(i,1) = 1.e-12 ! securite
      enddo
…
      d_u_oro = 0.
      d_v_oro = 0.
      zustrdr = 0.
      zvstrdr = 0.
c
      ENDIF ! fin de test sur ok_orodr
…
c     A ADAPTER POUR VENUS!!!
c     CALL lift_noro(klon,klev,dtime,paprs,pplay,
-c    e    rlatd,zmea,zstd,zpic,zgam,zthe,zpic,zval,
+c    e    latitude,zmea,zstd,zpic,zgam,zthe,zpic,zval,
c    e    igwd,idx,itest,
c    e    t_seri, u_seri, v_seri,
…
c====================================================================
      if (ballons.eq.1) then
-       CALL ballon(30,pdtphys,rjourvrai,gmtime*RDAY,rlatd,rlond,
+       CALL ballon(30,pdtphys,rjourvrai,gmtime*RDAY,latitude,longitude,
c    C      t,pplay,u,v,pphi) ! alt above surface (smoothed for GCM)
     C      t,pplay,u,v,zphi) ! alt above planet average radius
      endif !ballons

…
      CALL aaam_bud (27,klon,klev,rjourvrai,gmtime*RDAY,
     C    ra,rg,romega,
-    C    rlatd,rlond,pphis,
+    C    latitude,longitude,pphis,
     C    zustrdr,zustrli,zustrcl,
     C    zvstrdr,zvstrli,zvstrcl,
…
      IF (if_ebil.ge.1) THEN
        ztit='after physic'
-       CALL diagetpq(airephy,ztit,ip_ebil,1,1,dtime
+       CALL diagetpq(cell_area,ztit,ip_ebil,1,1,dtime
     e    , t_seri,zero_v2,zero_v2,zero_v2,u_seri,v_seri,paprs,pplay
     s    , d_h_vcol, d_qt, d_qw, d_ql, d_qs, d_ec)
…
C     est egale a la variation de la physique au pas de temps precedent.
C     Donc la somme de ces 2 variations devrait etre nulle.
-       call diagphy(airephy,ztit,ip_ebil
+       call diagphy(cell_area,ztit,ip_ebil
     e    , topsw, toplw, solsw, sollw, sens
     e    , zero_v, zero_v, zero_v, ztsol
…
c     DO k = 1, klev
c       DO i = 1, klon
-c         mang(i,k) = RA*cos(rlatd(i)*RPI/180.)
-c    .       *(u_seri(i,k)+RA*cos(rlatd(i)*RPI/180.)*ROMEGA)
-c    .       *airephy(i)*(paprs(i,k)-paprs(i,k+1))/RG
+c         mang(i,k) = RA*cos(latitude(i)*RPI/180.)
+c    .       *(u_seri(i,k)+RA*cos(latitude(i)*RPI/180.)*ROMEGA)
+c    .       *cell_area(i)*(paprs(i,k)-paprs(i,k+1))/RG
c         mangtot=mangtot+mang(i,k)
c       ENDDO
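As in the Titan version, the orbital and insolation routines (mucorr, zenang) now take the latitude and longitude arrays from the geometry module. Purely as a hedged sketch (this is not the zenang or mucorr algorithm, and coszenith_sketch is a made-up name), the cosine of the solar zenith angle at equinox can be computed per column from those two arrays:

    ! Hedged sketch, not the actual zenang/mucorr code: cosine of the solar
    ! zenith angle per column at equinox (solar declination = 0), using the
    ! per-column latitude/longitude arrays and a given subsolar longitude.
    SUBROUTINE coszenith_sketch(klon, latitude, longitude, subsolar_lon, rmu0)
      IMPLICIT NONE
      INTEGER, INTENT(IN) :: klon
      REAL, INTENT(IN)  :: latitude(klon), longitude(klon)  ! degrees
      REAL, INTENT(IN)  :: subsolar_lon                     ! degrees
      REAL, INTENT(OUT) :: rmu0(klon)                       ! cos(zenith angle), night = 0
      REAL, PARAMETER :: deg2rad = 3.141592653589793/180.
      INTEGER :: i
      DO i = 1, klon
        rmu0(i) = MAX(0., COS(latitude(i)*deg2rad) &
                          * COS((longitude(i)-subsolar_lon)*deg2rad))
      END DO
    END SUBROUTINE coszenith_sketch
-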
trunk/LMDZ.VENUS/libf/phyvenus/phytrac_emiss.F
r1530 r1543
      USE infotrac
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, only: cell_area
      USE chemparam_mod,only:M_tr
      USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat
…
     &    .and.((xlon(i)+deltalon).gt.lon_volcan(ilon)) ) then
          ig_volcan(ilat,ilon)= i
-         area_emiss(ilat,ilon) = airephy(i)
+         area_emiss(ilat,ilon) = cell_area(i)
          print*,"Lat,lon=",ilat,ilon," OK"
        end if
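phytrac_emiss locates the physics column containing a prescribed source (the volcano longitude/latitude) and stores that column's cell_area. Below is a hedged, self-contained sketch of that lookup only; find_cell_sketch and its half-width arguments are illustrative, not the phytrac_emiss code itself.

    ! Hedged sketch of the cell lookup above: return the index and cell_area
    ! of the physics column whose lat/lon box contains a target point.
    SUBROUTINE find_cell_sketch(klon, xlat, xlon, cell_area, dlat, dlon, &
                                target_lat, target_lon, ig_found, area_found)
      IMPLICIT NONE
      INTEGER, INTENT(IN)  :: klon
      REAL, INTENT(IN)     :: xlat(klon), xlon(klon)   ! column centers (degrees)
      REAL, INTENT(IN)     :: cell_area(klon)          ! cell areas (m2)
      REAL, INTENT(IN)     :: dlat, dlon               ! half-widths of a cell (degrees)
      REAL, INTENT(IN)     :: target_lat, target_lon   ! point to locate (degrees)
      INTEGER, INTENT(OUT) :: ig_found                 ! 0 if not found
      REAL, INTENT(OUT)    :: area_found
      INTEGER :: i
      ig_found = 0
      area_found = 0.
      DO i = 1, klon
        IF (ABS(xlat(i)-target_lat) <= dlat .AND. &
            ABS(xlon(i)-target_lon) <= dlon) THEN
          ig_found   = i
          area_found = cell_area(i)
          EXIT
        END IF
      END DO
    END SUBROUTINE find_cell_sketch
-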
trunk/LMDZ.VENUS/libf/phyvenus/phytrac_relax.F
r1530 r1543
      USE infotrac
      use dimphy
-     USE comgeomphy
      USE chemparam_mod,only:M_tr
      IMPLICIT none
-
trunk/LMDZ.VENUS/libf/phyvenus/radlwsw.F
r1530 r1543
c======================================================================
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, ONLY: latitude
      USE phys_state_var_mod, only: heat,cool,radsol,
     .    topsw,toplw,solsw,sollw,sollwdown,lwnet,swnet
…
        lat=1
      else
-       if (abs(rlatd(j)).le.50.) then
+       if (abs(latitude(j)).le.50.) then
          lat=1
-       elseif (abs(rlatd(j)).le.60.) then
+       elseif (abs(latitude(j)).le.60.) then
          lat=2
-       elseif (abs(rlatd(j)).le.70.) then
+       elseif (abs(latitude(j)).le.70.) then
          lat=3
-       elseif (abs(rlatd(j)).le.80.) then
+       elseif (abs(latitude(j)).le.80.) then
          lat=4
        else
-
trunk/LMDZ.VENUS/libf/phyvenus/radlwsw.NewtonCool
r1530 r1543
c======================================================================
      use dimphy
-     USE comgeomphy
+     USE geometry_mod, ONLY: latitude
      USE phys_state_var_mod, only: heat,cool,radsol,
     .    topsw,toplw,solsw,sollw,sollwdown,lwnet,swnet
…
      ztemp = tempCLee(level)*(1-fact)+tempCLee(level+1)*fact
      zdt = dt_epCLee(level)*(1-fact)+dt_epCLee(level+1)*fact
-c    zt_eq(i,k) = ztemp + zdt*(cos(rlatd(i)*RPI/180.)-2./RPI)
-     zt_eq(i,k) = ztemp + zdt*(cos(rlatd(i)*RPI/180.)-RPI/4.)
+c    zt_eq(i,k) = ztemp + zdt*(cos(latitude(i)*RPI/180.)-2./RPI)
+     zt_eq(i,k) = ztemp + zdt*(cos(latitude(i)*RPI/180.)-RPI/4.)

      enddo
-
trunk/LMDZ.VENUS/libf/phyvenus/write_histday.h
r1310 r1543

      call histwrite_phy(nid_day,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_day,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_day,.false.,"aire",itau_w,cell_area)
      call histwrite_phy(nid_day,.false.,"tsol",itau_w,ftsol)
      call histwrite_phy(nid_day,.false.,"psol",itau_w,paprs(:,1))
-
trunk/LMDZ.VENUS/libf/phyvenus/write_histins.h
r1453 r1543

      call histwrite_phy(nid_ins,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_ins,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_ins,.false.,"aire",itau_w,cell_area)
      call histwrite_phy(nid_ins,.false.,"tsol",itau_w,ftsol)
      call histwrite_phy(nid_ins,.false.,"psol",itau_w,paprs(:,1))
-
trunk/LMDZ.VENUS/libf/phyvenus/write_histmth.h
r1518 r1543

      call histwrite_phy(nid_mth,.false.,"phis",itau_w,pphis)
-     call histwrite_phy(nid_mth,.false.,"aire",itau_w,airephy)
+     call histwrite_phy(nid_mth,.false.,"aire",itau_w,cell_area)
      call histwrite_phy(nid_mth,.false.,"tsol",itau_w,ftsol)
      call histwrite_phy(nid_mth,.false.,"psol",itau_w,paprs(:,1))