source: trunk/LMDZ.MARS/libf/phymars/mod_phys_lmdz_mpi_data.F90 @ 1242

Last change on this file since 1242 was 1130, checked in by emillour, 11 years ago

Mars GCM:
Series of changes to enable running in parallel (using LMDZ.COMMON dynamics);
Current LMDZ.MARS can still nonetheless be compiled and run in serial mode
"as previously".
Summary of main changes:

  • Main programs (newstart, start2archive, xvik) that used to be in dyn3d have been moved to phymars.
  • dyn3d/control.h is now module control_mod.F90
  • rearranged input/output routines everywhere to handle serial/MPI cases. physdem.F => phyredem.F90 , phyetat0.F => phyetat0.F90 ; all read/write routines for startfi files are gathered in module iostart.F90
  • added parallelism related routines init_phys_lmdz.F90, comgeomphy.F90, dimphy.F90, iniphysiq.F90, mod_grid_phy_lmdz.F90, mod_phys_lmdz_mpi_data.F90, mod_phys_lmdz_mpi_transfert.F90, mod_phys_lmdz_omp_data.F90, mod_phys_lmdz_omp_transfert.F90, mod_phys_lmdz_para.F90, mod_phys_lmdz_transfert_para.F90 in phymars and mod_const_mpi.F90 in dyn3d (for compliance with parallel case)
  • created generic routines 'planetwide_maxval' and 'planetwide_minval', in module "planetwide_mod", that enable obtaining the min and max of a field over the whole planet.

EM

File size: 5.7 KB
Line 
!
!$Header$
!
MODULE mod_phys_lmdz_mpi_data
!
! Holds the MPI-level domain decomposition of the physics grid:
! for each MPI process, the range of latitude rows (jj_*), longitude
! indices (ii_*), dynamics grid points (ij_*) and physics columns
! (klon_mpi_*) that it owns. Filled once at startup by
! Init_phys_lmdz_mpi_data().
!
  USE mod_const_mpi, only: MPI_REAL_LMDZ, COMM_LMDZ

  ! Bounds of the sub-domain owned by the local MPI process
  INTEGER,SAVE :: ii_begin        ! first longitude index (on first row)
  INTEGER,SAVE :: ii_end          ! last longitude index (on last row)
  INTEGER,SAVE :: jj_begin        ! first latitude row
  INTEGER,SAVE :: jj_end          ! last latitude row
  INTEGER,SAVE :: jj_nb           ! number of latitude rows
  INTEGER,SAVE :: ij_begin        ! first dynamics grid point (global index)
  INTEGER,SAVE :: ij_end          ! last dynamics grid point (global index)
  INTEGER,SAVE :: ij_nb           ! number of dynamics grid points
  INTEGER,SAVE :: klon_mpi_begin  ! first physics column (global index)
  INTEGER,SAVE :: klon_mpi_end    ! last physics column (global index)
  INTEGER,SAVE :: klon_mpi        ! number of local physics columns

  ! Same quantities tabulated for every MPI process (indexed 0:mpi_size-1)
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: jj_para_nb
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: jj_para_begin
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: jj_para_end

  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: ii_para_begin
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: ii_para_end

  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: ij_para_nb
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: ij_para_begin
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: ij_para_end

  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: klon_mpi_para_nb
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: klon_mpi_para_begin
  INTEGER,SAVE,ALLOCATABLE,DIMENSION(:) :: klon_mpi_para_end

  ! MPI context
  INTEGER,SAVE :: mpi_rank        ! rank of this process in COMM_LMDZ_PHY
  INTEGER,SAVE :: mpi_size        ! total number of MPI processes
  INTEGER,SAVE :: mpi_root        ! rank of the root process (0)
  LOGICAL,SAVE :: is_mpi_root     ! .true. on the root process only
  LOGICAL,SAVE :: is_using_mpi    ! .true. when compiled with CPP_MPI

  LOGICAL,SAVE :: is_north_pole   ! this process owns the north pole row
  LOGICAL,SAVE :: is_south_pole   ! this process owns the south pole row
  INTEGER,SAVE :: COMM_LMDZ_PHY   ! MPI communicator used by the physics

CONTAINS

  SUBROUTINE Init_phys_lmdz_mpi_data(iim,jjp1,nb_proc,distrib)
  ! Compute the MPI domain decomposition of the physics grid and fill
  ! every module variable above.
  !   iim     : number of longitudes of the dynamics grid
  !   jjp1    : number of latitudes of the dynamics grid
  !   nb_proc : number of MPI processes (expected to equal mpi_size)
  !   distrib : number of physics columns attributed to each process
  USE mod_const_mpi, ONLY : COMM_LMDZ
  IMPLICIT NONE
    INTEGER,INTENT(in) :: iim
    INTEGER,INTENT(in) :: jjp1
    INTEGER,INTENT(in) :: nb_proc
    INTEGER,INTENT(in) :: distrib(0:nb_proc-1)

    INTEGER :: ierr
    INTEGER :: klon_glo  ! total number of physics columns on the planet
    INTEGER :: i

#ifdef CPP_MPI
    is_using_mpi=.TRUE.
#else
    is_using_mpi=.FALSE.
#endif

    ! Total number of physics columns: each pole row collapses to a
    ! single column, hence iim*(jjp1-2)+2; a single-column (1D) run
    ! has exactly one.
    if (iim.eq.1) then
       klon_glo=1
    else
       klon_glo=iim*(jjp1-2)+2
    endif

    COMM_LMDZ_PHY=COMM_LMDZ

    IF (is_using_mpi) THEN
#ifdef CPP_MPI
      CALL MPI_COMM_SIZE(COMM_LMDZ_PHY,mpi_size,ierr)
      CALL MPI_COMM_RANK(COMM_LMDZ_PHY,mpi_rank,ierr)
#endif
    ELSE
      mpi_size=1
      mpi_rank=0
    ENDIF

    ! Fix: define mpi_root and is_mpi_root on EVERY process. The
    ! previous code assigned them only when mpi_rank == 0, leaving
    ! both undefined on all other ranks.
    mpi_root = 0
    is_mpi_root = (mpi_rank == mpi_root)

    ! The first (resp. last) process holds the north (resp. south) pole
    is_north_pole = (mpi_rank == 0)
    is_south_pole = (mpi_rank == mpi_size-1)

    ALLOCATE(jj_para_nb(0:mpi_size-1))
    ALLOCATE(jj_para_begin(0:mpi_size-1))
    ALLOCATE(jj_para_end(0:mpi_size-1))

    ALLOCATE(ij_para_nb(0:mpi_size-1))
    ALLOCATE(ij_para_begin(0:mpi_size-1))
    ALLOCATE(ij_para_end(0:mpi_size-1))

    ALLOCATE(ii_para_begin(0:mpi_size-1))
    ALLOCATE(ii_para_end(0:mpi_size-1))

    ALLOCATE(klon_mpi_para_nb(0:mpi_size-1))
    ALLOCATE(klon_mpi_para_begin(0:mpi_size-1))
    ALLOCATE(klon_mpi_para_end(0:mpi_size-1))

    ! NOTE(review): assumes nb_proc == mpi_size — confirm at call sites
    klon_mpi_para_nb(0:mpi_size-1)=distrib(0:nb_proc-1)

    ! Cumulative physics-column ranges, process by process
    DO i=0,mpi_size-1
      IF (i==0) THEN
        klon_mpi_para_begin(i)=1
      ELSE
        klon_mpi_para_begin(i)=klon_mpi_para_end(i-1)+1
      ENDIF
      klon_mpi_para_end(i)=klon_mpi_para_begin(i)+klon_mpi_para_nb(i)-1
    ENDDO

    DO i=0,mpi_size-1
      ! Map physics-column indices to dynamics (ij) indices: the north
      ! pole row holds iim dynamics points but a single physics column,
      ! hence the iim-1 shift for every process after the first.
      IF (i==0) THEN
        ij_para_begin(i) = 1
      ELSE
        ij_para_begin(i) = klon_mpi_para_begin(i)+iim-1
      ENDIF

      jj_para_begin(i) = (ij_para_begin(i)-1)/iim + 1
      ii_para_begin(i) = MOD(ij_para_begin(i)-1,iim) + 1

      ij_para_end(i) = klon_mpi_para_end(i)+iim-1
      jj_para_end(i) = (ij_para_end(i)-1)/iim + 1
      ii_para_end(i) = MOD(ij_para_end(i)-1,iim) + 1

      ! Ehouarn: handle 1D case:
      if (klon_glo.eq.1) then
        klon_mpi_para_end(i) = 1
        klon_mpi_para_nb(i) = 1
        ij_para_end(i) = 1
        jj_para_end(i) = 1
        ii_para_end(i) = 1
      endif

      ij_para_nb(i) = ij_para_end(i)-ij_para_begin(i)+1
      jj_para_nb(i) = jj_para_end(i)-jj_para_begin(i)+1
    ENDDO

    ! Extract the bounds of the local process
    ii_begin = ii_para_begin(mpi_rank)
    ii_end   = ii_para_end(mpi_rank)
    jj_begin = jj_para_begin(mpi_rank)
    jj_end   = jj_para_end(mpi_rank)
    jj_nb    = jj_para_nb(mpi_rank)
    ij_begin = ij_para_begin(mpi_rank)
    ij_end   = ij_para_end(mpi_rank)
    ij_nb    = ij_para_nb(mpi_rank)
    klon_mpi_begin = klon_mpi_para_begin(mpi_rank)
    klon_mpi_end   = klon_mpi_para_end(mpi_rank)
    klon_mpi       = klon_mpi_para_nb(mpi_rank)

    CALL Print_module_data

  END SUBROUTINE Init_phys_lmdz_mpi_data

  SUBROUTINE print_module_data
  ! Dump every module variable to standard output (debugging aid;
  ! called at the end of Init_phys_lmdz_mpi_data).
  IMPLICIT NONE
!  INCLUDE "iniprint.h"

    WRITE(*,*) 'ii_begin =', ii_begin
    WRITE(*,*) 'ii_end =', ii_end
    WRITE(*,*) 'jj_begin =',jj_begin
    WRITE(*,*) 'jj_end =', jj_end
    WRITE(*,*) 'jj_nb =', jj_nb
    WRITE(*,*) 'ij_begin =', ij_begin
    WRITE(*,*) 'ij_end =', ij_end
    WRITE(*,*) 'ij_nb =', ij_nb
    WRITE(*,*) 'klon_mpi_begin =', klon_mpi_begin
    WRITE(*,*) 'klon_mpi_end =', klon_mpi_end
    WRITE(*,*) 'klon_mpi =', klon_mpi
    WRITE(*,*) 'jj_para_nb =', jj_para_nb
    WRITE(*,*) 'jj_para_begin =', jj_para_begin
    WRITE(*,*) 'jj_para_end =', jj_para_end
    WRITE(*,*) 'ii_para_begin =', ii_para_begin
    WRITE(*,*) 'ii_para_end =', ii_para_end
    WRITE(*,*) 'ij_para_nb =', ij_para_nb
    WRITE(*,*) 'ij_para_begin =', ij_para_begin
    WRITE(*,*) 'ij_para_end =', ij_para_end
    WRITE(*,*) 'klon_mpi_para_nb =', klon_mpi_para_nb
    WRITE(*,*) 'klon_mpi_para_begin =', klon_mpi_para_begin
    WRITE(*,*) 'klon_mpi_para_end  =', klon_mpi_para_end
    WRITE(*,*) 'mpi_rank =', mpi_rank
    WRITE(*,*) 'mpi_size =', mpi_size
    WRITE(*,*) 'mpi_root =', mpi_root
    WRITE(*,*) 'is_mpi_root =', is_mpi_root
    WRITE(*,*) 'is_north_pole =', is_north_pole
    WRITE(*,*) 'is_south_pole =', is_south_pole
    WRITE(*,*) 'COMM_LMDZ_PHY =', COMM_LMDZ_PHY

  END SUBROUTINE print_module_data

END MODULE mod_phys_lmdz_mpi_data
Note: See TracBrowser for help on using the repository browser.