! $Id$

MODULE parallel_lmdz
  USE mod_const_mpi
  USE lmdz_mpi, ONLY: using_mpi
  USE IOIPSL
  INTEGER, PARAMETER :: halo_max = 3

  LOGICAL, SAVE :: using_omp ! .TRUE. if using OpenMP
  LOGICAL, SAVE :: is_master ! .TRUE. if the core is both MPI & OpenMP master
  !$OMP THREADPRIVATE(is_master)

  INTEGER, SAVE :: mpi_size
  INTEGER, SAVE :: mpi_rank
  INTEGER, SAVE :: jj_begin
  INTEGER, SAVE :: jj_end
  INTEGER, SAVE :: jj_nb
  INTEGER, SAVE :: ij_begin
  INTEGER, SAVE :: ij_end
  LOGICAL, SAVE :: pole_nord
  LOGICAL, SAVE :: pole_sud

  INTEGER, SAVE :: jjb_u
  INTEGER, SAVE :: jje_u
  INTEGER, SAVE :: jjnb_u
  INTEGER, SAVE :: jjb_v
  INTEGER, SAVE :: jje_v
  INTEGER, SAVE :: jjnb_v

  INTEGER, SAVE :: ijb_u
  INTEGER, SAVE :: ije_u
  INTEGER, SAVE :: ijnb_u

  INTEGER, SAVE :: ijb_v
  INTEGER, SAVE :: ije_v
  INTEGER, SAVE :: ijnb_v

  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_begin_para
  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_end_para
  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_nb_para
  INTEGER, SAVE :: OMP_CHUNK
  INTEGER, SAVE :: omp_rank
  INTEGER, SAVE :: omp_size
  !$OMP THREADPRIVATE(omp_rank)

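  ! The 'distrib' derived type stores one latitude-band domain decomposition:
  ! the bounds of the band owned by the current MPI process, the corresponding
  ! halo-extended bounds on the u and v grids, and the per-process band
  ! description shared by all processes.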
  TYPE distrib
    INTEGER :: jj_begin
    INTEGER :: jj_end
    INTEGER :: jj_nb
    INTEGER :: ij_begin
    INTEGER :: ij_end

    INTEGER :: jjb_u
    INTEGER :: jje_u
    INTEGER :: jjnb_u
    INTEGER :: jjb_v
    INTEGER :: jje_v
    INTEGER :: jjnb_v

    INTEGER :: ijb_u
    INTEGER :: ije_u
    INTEGER :: ijnb_u

    INTEGER :: ijb_v
    INTEGER :: ije_v
    INTEGER :: ijnb_v

    INTEGER, POINTER :: jj_begin_para(:) => NULL()
    INTEGER, POINTER :: jj_end_para(:) => NULL()
    INTEGER, POINTER :: jj_nb_para(:) => NULL()
  END TYPE distrib

  INTERFACE ASSIGNMENT (=)
    MODULE PROCEDURE copy_distrib
  END INTERFACE
  TYPE(distrib), SAVE :: current_dist

CONTAINS

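  ! init_parallel: queries the MPI and OpenMP environments, splits the jjm+1
  ! latitude rows into one contiguous band per MPI process, and fills the
  ! module-level decomposition variables (band bounds, halo bounds, poles).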
  SUBROUTINE init_parallel
    USE lmdz_vampir
    USE lmdz_mpi
    USE lmdz_iniprint, ONLY: lunout, prt_level
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER :: ierr
    INTEGER :: i, j
    INTEGER :: type_size
    INTEGER, DIMENSION(3) :: blocklen, type
    INTEGER :: comp_id
    CHARACTER(LEN = 4) :: num
    CHARACTER(LEN = 20) :: filename

#ifdef CPP_OMP
    INTEGER :: OMP_GET_NUM_THREADS
    EXTERNAL OMP_GET_NUM_THREADS
    INTEGER :: OMP_GET_THREAD_NUM
    EXTERNAL OMP_GET_THREAD_NUM
#endif

#ifdef CPP_OMP
    using_OMP = .TRUE.
#else
    using_OMP = .FALSE.
#endif

    CALL InitVampir

    IF (using_mpi) THEN
      CALL MPI_COMM_SIZE(COMM_LMDZ, mpi_size, ierr)
      CALL MPI_COMM_RANK(COMM_LMDZ, mpi_rank, ierr)
    ELSE
      mpi_size = 1
      mpi_rank = 0
    ENDIF

    ! Open text output file with mpi_rank in suffix of file name
    IF (lunout /= 5 .AND. lunout /= 6) THEN
      WRITE(num, '(I4.4)') mpi_rank
      filename = 'lmdz.out_' // num
      IF (mpi_rank /= 0) THEN
        OPEN(UNIT = lunout, FILE = TRIM(filename), ACTION = 'write', &
                STATUS = 'unknown', FORM = 'formatted', IOSTAT = ierr)
      ENDIF
    ENDIF

    ALLOCATE(jj_begin_para(0:mpi_size - 1))
    ALLOCATE(jj_end_para(0:mpi_size - 1))
    ALLOCATE(jj_nb_para(0:mpi_size - 1))

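    ! Distribute the jjm+1 latitude rows as evenly as possible over the MPI
    ! processes: each process gets (jjm+1)/mpi_size rows, and the first
    ! MOD(jjm+1, mpi_size) processes receive one extra row.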
    DO i = 0, mpi_size - 1
      jj_nb_para(i) = (jjm + 1) / mpi_size
      IF (i < MOD((jjm + 1), mpi_size)) jj_nb_para(i) = jj_nb_para(i) + 1

      IF (jj_nb_para(i) <= 1) THEN
        WRITE(lunout, *)"Abort: the number of latitude bands per process is too small (<2)."
        WRITE(lunout, *)" ---> decrease the number of CPUs or increase the latitude dimension"

        IF (using_mpi) CALL MPI_ABORT(COMM_LMDZ, -1, ierr)

      ENDIF

    ENDDO
    ! jj_nb_para(0)=11
    ! jj_nb_para(1)=25
    ! jj_nb_para(2)=25
    ! jj_nb_para(3)=12

    j = 1

    DO i = 0, mpi_size - 1

      jj_begin_para(i) = j
      jj_end_para(i) = j + jj_Nb_para(i) - 1
      j = j + jj_Nb_para(i)

    ENDDO

    jj_begin = jj_begin_para(mpi_rank)
    jj_end = jj_end_para(mpi_rank)
    jj_nb = jj_nb_para(mpi_rank)

    ij_begin = (jj_begin - 1) * iip1 + 1
    ij_end = jj_end * iip1

    IF (mpi_rank==0) THEN
      pole_nord = .TRUE.
    ELSE
      pole_nord = .FALSE.
    ENDIF

    IF (mpi_rank==mpi_size - 1) THEN
      pole_sud = .TRUE.
    ELSE
      pole_sud = .FALSE.
    ENDIF

    WRITE(lunout, *)"init_parallel: jj_begin", jj_begin
    WRITE(lunout, *)"init_parallel: jj_end", jj_end
    WRITE(lunout, *)"init_parallel: ij_begin", ij_begin
    WRITE(lunout, *)"init_parallel: ij_end", ij_end
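    ! Halo-extended bounds: the local band enlarged by up to halo_max rows on
    ! each side and clipped to the global grid ('_u' bounds refer to the
    ! scalar/u grid with jjp1 rows, '_v' bounds to the v grid with jjm rows).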
    jjb_u = MAX(jj_begin - halo_max, 1)
    jje_u = MIN(jj_end + halo_max, jjp1)
    jjnb_u = jje_u - jjb_u + 1

    jjb_v = MAX(jj_begin - halo_max, 1)
    jje_v = MIN(jj_end + halo_max, jjm)
    jjnb_v = jje_v - jjb_v + 1

    ijb_u = MAX(ij_begin - halo_max * iip1, 1)
    ije_u = MIN(ij_end + halo_max * iip1, ip1jmp1)
    ijnb_u = ije_u - ijb_u + 1

    ijb_v = MAX(ij_begin - halo_max * iip1, 1)
    ije_v = MIN(ij_end + halo_max * iip1, ip1jm)
    ijnb_v = ije_v - ijb_v + 1
    !$OMP PARALLEL

#ifdef CPP_OMP
    !$OMP MASTER
    omp_size = OMP_GET_NUM_THREADS()
    !$OMP END MASTER
    !$OMP BARRIER
    omp_rank = OMP_GET_THREAD_NUM()

    !Config Key  = omp_chunk
    !Config Desc = size of the OpenMP chunks
    !Config Def  = (llm+1)/omp_size, rounded up
    !Config Help = defines the size of the OpenMP iteration chunks handed to
    !Config        each thread when entering a parallelised loop

    !$OMP MASTER
    omp_chunk = (llm + 1) / omp_size
    IF (MOD(llm + 1, omp_size)/=0) omp_chunk = omp_chunk + 1
    CALL getin('omp_chunk', omp_chunk)
    !$OMP END MASTER
    !$OMP BARRIER
#else
    omp_size = 1
    omp_rank = 0
#endif
    !$OMP END PARALLEL
    CALL create_distrib(jj_nb_para, current_dist)

    IF ((mpi_rank==0).AND.(omp_rank==0)) THEN
      is_master = .TRUE.
    ELSE
      is_master = .FALSE.
    ENDIF

  END SUBROUTINE init_parallel

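  ! create_distrib: builds a 'distrib' structure from the per-process band
  ! sizes jj_nb_new, recomputing the band bounds and the halo-extended bounds
  ! for the current MPI process.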
  SUBROUTINE create_distrib(jj_nb_new, d)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER, INTENT(IN) :: jj_Nb_New(0:MPI_Size - 1)
    TYPE(distrib), INTENT(INOUT) :: d
    INTEGER :: i

    IF (.NOT. ASSOCIATED(d%jj_nb_para)) ALLOCATE(d%jj_nb_para(0:MPI_Size - 1))
    IF (.NOT. ASSOCIATED(d%jj_begin_para)) ALLOCATE(d%jj_begin_para(0:MPI_Size - 1))
    IF (.NOT. ASSOCIATED(d%jj_end_para)) ALLOCATE(d%jj_end_para(0:MPI_Size - 1))

    d%jj_Nb_Para = jj_Nb_New

    d%jj_begin_para(0) = 1
    d%jj_end_para(0) = d%jj_Nb_Para(0)

    DO i = 1, mpi_size - 1

      d%jj_begin_para(i) = d%jj_end_para(i - 1) + 1
      d%jj_end_para(i) = d%jj_begin_para(i) + d%jj_Nb_para(i) - 1

    ENDDO

    d%jj_begin = d%jj_begin_para(mpi_rank)
    d%jj_end = d%jj_end_para(mpi_rank)
    d%jj_nb = d%jj_nb_para(mpi_rank)

    d%ij_begin = (d%jj_begin - 1) * iip1 + 1
    d%ij_end = d%jj_end * iip1

    d%jjb_u = MAX(d%jj_begin - halo_max, 1)
    d%jje_u = MIN(d%jj_end + halo_max, jjp1)
    d%jjnb_u = d%jje_u - d%jjb_u + 1

    d%jjb_v = MAX(d%jj_begin - halo_max, 1)
    d%jje_v = MIN(d%jj_end + halo_max, jjm)
    d%jjnb_v = d%jje_v - d%jjb_v + 1

    d%ijb_u = MAX(d%ij_begin - halo_max * iip1, 1)
    d%ije_u = MIN(d%ij_end + halo_max * iip1, ip1jmp1)
    d%ijnb_u = d%ije_u - d%ijb_u + 1

    d%ijb_v = MAX(d%ij_begin - halo_max * iip1, 1)
    d%ije_v = MIN(d%ij_end + halo_max * iip1, ip1jm)
    d%ijnb_v = d%ije_v - d%ijb_v + 1

  END SUBROUTINE create_distrib

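  ! Set_Distrib: makes 'd' the active decomposition by copying its bounds into
  ! the module-level variables and recording it in current_dist.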
  SUBROUTINE Set_Distrib(d)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    TYPE(distrib), INTENT(IN) :: d

    jj_begin = d%jj_begin
    jj_end = d%jj_end
    jj_nb = d%jj_nb
    ij_begin = d%ij_begin
    ij_end = d%ij_end

    jjb_u = d%jjb_u
    jje_u = d%jje_u
    jjnb_u = d%jjnb_u
    jjb_v = d%jjb_v
    jje_v = d%jje_v
    jjnb_v = d%jjnb_v

    ijb_u = d%ijb_u
    ije_u = d%ije_u
    ijnb_u = d%ijnb_u

    ijb_v = d%ijb_v
    ije_v = d%ije_v
    ijnb_v = d%ijnb_v

    jj_begin_para(:) = d%jj_begin_para(:)
    jj_end_para(:) = d%jj_end_para(:)
    jj_nb_para(:) = d%jj_nb_para(:)
    current_dist = d

  END SUBROUTINE Set_Distrib

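  ! copy_distrib: component-wise copy of a 'distrib' structure; bound to the
  ! assignment operator (=) so that 'dist = new_dist' copies the array contents
  ! rather than the pointer associations.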
  SUBROUTINE copy_distrib(dist, new_dist)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    TYPE(distrib), INTENT(INOUT) :: dist
    TYPE(distrib), INTENT(IN) :: new_dist

    dist%jj_begin = new_dist%jj_begin
    dist%jj_end = new_dist%jj_end
    dist%jj_nb = new_dist%jj_nb
    dist%ij_begin = new_dist%ij_begin
    dist%ij_end = new_dist%ij_end

    dist%jjb_u = new_dist%jjb_u
    dist%jje_u = new_dist%jje_u
    dist%jjnb_u = new_dist%jjnb_u
    dist%jjb_v = new_dist%jjb_v
    dist%jje_v = new_dist%jje_v
    dist%jjnb_v = new_dist%jjnb_v

    dist%ijb_u = new_dist%ijb_u
    dist%ije_u = new_dist%ije_u
    dist%ijnb_u = new_dist%ijnb_u

    dist%ijb_v = new_dist%ijb_v
    dist%ije_v = new_dist%ije_v
    dist%ijnb_v = new_dist%ijnb_v

    dist%jj_begin_para(:) = new_dist%jj_begin_para(:)
    dist%jj_end_para(:) = new_dist%jj_end_para(:)
    dist%jj_nb_para(:) = new_dist%jj_nb_para(:)

  END SUBROUTINE copy_distrib

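  ! get_current_distrib: returns a copy of the decomposition currently in use.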
  SUBROUTINE get_current_distrib(d)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    TYPE(distrib), INTENT(OUT) :: d

    d = current_dist

  END SUBROUTINE get_current_distrib

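  ! Finalize_parallel: releases the decomposition arrays, closes XIOS when it
  ! is used, terminates the OASIS coupler in coupled configurations and calls
  ! MPI_FINALIZE otherwise.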
  SUBROUTINE Finalize_parallel
    USE lmdz_mpi
    USE lmdz_wxios ! ug For the XIOS outputs
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet

#ifdef CPP_COUPLE
    ! Use of Oasis-MCT coupler
#if defined CPP_OMCT
    USE mod_prism
#else
    USE mod_prism_proto
#endif
    ! Ehouarn: surface_data module is in 'phylmd' ...
    USE surface_data, ONLY: type_ocean
    IMPLICIT NONE
#else
    IMPLICIT NONE
    ! without the surface_data module, we declare (and set) a dummy 'type_ocean'
    CHARACTER(LEN = 6), PARAMETER :: type_ocean = "dummy"
#endif

    INTEGER :: ierr
    INTEGER :: i

    IF (ALLOCATED(jj_begin_para)) DEALLOCATE(jj_begin_para)
    IF (ALLOCATED(jj_end_para)) DEALLOCATE(jj_end_para)
    IF (ALLOCATED(jj_nb_para)) DEALLOCATE(jj_nb_para)

    IF (type_ocean == 'couple') THEN
#ifdef CPP_COUPLE
      IF (using_xios) THEN
        ! Proper shutdown of XIOS
        CALL wxios_close()
        CALL prism_terminate_proto(ierr)
        IF (ierr .NE. PRISM_Ok) THEN
          CALL abort_gcm('Finalize_parallel', ' Problem in prism_terminate_proto ', 1)
        ENDIF
      ELSE
        CALL prism_terminate_proto(ierr)
        IF (ierr .NE. PRISM_Ok) THEN
          CALL abort_gcm('Finalize_parallel', ' Problem in prism_terminate_proto ', 1)
        ENDIF
      ENDIF
#else
      CALL abort_gcm('Finalize_parallel', 'type_ocean = couple but CPP_COUPLE not defined', 1)
#endif
    ELSE
      IF (using_xios) THEN
        ! Proper shutdown of XIOS
        CALL wxios_close()
      ENDIF
      IF (using_mpi) CALL MPI_FINALIZE(ierr)
    ENDIF

  END SUBROUTINE Finalize_parallel

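  ! Pack_Data: copies 'row' latitude rows (row*iip1 points per level) of a 2D
  ! field (horizontal index x level) into a contiguous 1D buffer, level by
  ! level, for use in MPI transfers.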
  SUBROUTINE Pack_Data(Field, ij, ll, row, Buffer)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER, INTENT(IN) :: ij, ll, row
    REAL, DIMENSION(ij, ll), INTENT(IN) :: Field
    REAL, DIMENSION(ll * iip1 * row), INTENT(OUT) :: Buffer

    INTEGER :: Pos
    INTEGER :: i, l

    Pos = 0
    DO l = 1, ll
      DO i = 1, row * iip1
        Pos = Pos + 1
        Buffer(Pos) = Field(i, l)
      ENDDO
    ENDDO

  END SUBROUTINE Pack_Data

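  ! Unpack_Data: inverse of Pack_Data; copies a contiguous 1D buffer back into
  ! 'row' latitude rows of a 2D field, level by level.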
  SUBROUTINE Unpack_Data(Field, ij, ll, row, Buffer)
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER, INTENT(IN) :: ij, ll, row
    REAL, DIMENSION(ij, ll), INTENT(OUT) :: Field
    REAL, DIMENSION(ll * iip1 * row), INTENT(IN) :: Buffer

    INTEGER :: Pos
    INTEGER :: i, l

    Pos = 0

    DO l = 1, ll
      DO i = 1, row * iip1
        Pos = Pos + 1
        Field(i, l) = Buffer(Pos)
      ENDDO
    ENDDO

  END SUBROUTINE Unpack_Data

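  ! barrier: MPI barrier over COMM_LMDZ, protected by an OpenMP critical
  ! section so that only one thread at a time issues the MPI call.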
  SUBROUTINE barrier
    USE lmdz_mpi
    IMPLICIT NONE
    INTEGER :: ierr

    !$OMP CRITICAL (MPI)
    IF (using_mpi) CALL MPI_Barrier(COMM_LMDZ, ierr)
    !$OMP END CRITICAL (MPI)

  END SUBROUTINE barrier

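  ! exchange_hallo: fills the halo rows of a field by exchanging data with the
  ! neighbouring MPI processes: 'up' rows above the local band are received
  ! from rank-1 and 'down' rows below it from rank+1 (the matching rows are
  ! sent back), using non-blocking MPI calls completed by MPI_WAITALL.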
  SUBROUTINE exchange_hallo(Field, ij, ll, up, down)
    USE lmdz_mpi
    USE lmdz_vampir
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER :: ij, ll
    REAL, DIMENSION(ij, ll) :: Field
    INTEGER :: up, down

    INTEGER :: ierr
    LOGICAL :: SendUp, SendDown
    LOGICAL :: RecvUp, RecvDown
    INTEGER, DIMENSION(4) :: Request
    INTEGER, DIMENSION(MPI_STATUS_SIZE, 4) :: Status

    INTEGER :: NbRequest
    REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Send_up, Buffer_Send_down
    REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Recv_up, Buffer_Recv_down
    INTEGER :: Buffer_size

    IF (using_mpi) THEN

      CALL barrier

      CALL VTb(VThallo)

      SendUp = .TRUE.
      SendDown = .TRUE.
      RecvUp = .TRUE.
      RecvDown = .TRUE.

      IF (pole_nord) THEN
        SendUp = .FALSE.
        RecvUp = .FALSE.
      ENDIF

      IF (pole_sud) THEN
        SendDown = .FALSE.
        RecvDown = .FALSE.
      ENDIF

      IF (up==0) THEN
        SendDown = .FALSE.
        RecvUp = .FALSE.
      ENDIF

      IF (down==0) THEN
        SendUp = .FALSE.
        RecvDown = .FALSE.
      ENDIF

      NbRequest = 0

      IF (SendUp) THEN
        NbRequest = NbRequest + 1
        buffer_size = down * iip1 * ll
        ALLOCATE(Buffer_Send_up(Buffer_size))
        CALL PACK_Data(Field(ij_begin, 1), ij, ll, down, Buffer_Send_up)
        !$OMP CRITICAL (MPI)
        CALL MPI_ISEND(Buffer_send_up, Buffer_Size, MPI_REAL8, MPI_Rank - 1, 1, &
                COMM_LMDZ, Request(NbRequest), ierr)
        !$OMP END CRITICAL (MPI)
      ENDIF

      IF (SendDown) THEN
        NbRequest = NbRequest + 1

        buffer_size = up * iip1 * ll
        ALLOCATE(Buffer_Send_down(Buffer_size))
        CALL PACK_Data(Field(ij_end + 1 - up * iip1, 1), ij, ll, up, Buffer_send_down)

        !$OMP CRITICAL (MPI)
        CALL MPI_ISEND(Buffer_send_down, Buffer_Size, MPI_REAL8, MPI_Rank + 1, 1, &
                COMM_LMDZ, Request(NbRequest), ierr)
        !$OMP END CRITICAL (MPI)
      ENDIF

      IF (RecvUp) THEN
        NbRequest = NbRequest + 1
        buffer_size = up * iip1 * ll
        ALLOCATE(Buffer_recv_up(Buffer_size))

        !$OMP CRITICAL (MPI)
        CALL MPI_IRECV(Buffer_recv_up, Buffer_size, MPI_REAL8, MPI_Rank - 1, 1, &
                COMM_LMDZ, Request(NbRequest), ierr)
        !$OMP END CRITICAL (MPI)

      ENDIF

      IF (RecvDown) THEN
        NbRequest = NbRequest + 1
        buffer_size = down * iip1 * ll
        ALLOCATE(Buffer_recv_down(Buffer_size))

        !$OMP CRITICAL (MPI)
        CALL MPI_IRECV(Buffer_recv_down, Buffer_size, MPI_REAL8, MPI_Rank + 1, 1, &
                COMM_LMDZ, Request(NbRequest), ierr)
        !$OMP END CRITICAL (MPI)

      ENDIF

      IF (NbRequest > 0) CALL MPI_WAITALL(NbRequest, Request, Status, ierr)
      IF (RecvUp) CALL Unpack_Data(Field(ij_begin - up * iip1, 1), ij, ll, up, Buffer_Recv_up)
      IF (RecvDown) CALL Unpack_Data(Field(ij_end + 1, 1), ij, ll, down, Buffer_Recv_down)

      CALL VTe(VThallo)
      CALL barrier

    ENDIF ! using_mpi

  END SUBROUTINE exchange_Hallo

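  ! Gather_Field: gathers a field distributed over the latitude bands onto the
  ! process 'rank' with MPI_GATHERV; 'ij' must be ip1jmp1 (scalar/u grid) or
  ! ip1jm (v grid).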
  SUBROUTINE Gather_Field(Field, ij, ll, rank)
    USE lmdz_mpi
    USE lmdz_iniprint, ONLY: lunout, prt_level
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER :: ij, ll, rank
    REAL, DIMENSION(ij, ll) :: Field
    REAL, DIMENSION(:), ALLOCATABLE :: Buffer_send
    REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Recv
    INTEGER, DIMENSION(0:MPI_Size - 1) :: Recv_count, displ
    INTEGER :: ierr
    INTEGER :: i

    IF (using_mpi) THEN

      IF (ij==ip1jmp1) THEN
        ALLOCATE(Buffer_send(iip1 * ll * (jj_end - jj_begin + 1)))
        CALL Pack_Data(Field(ij_begin, 1), ij, ll, jj_end - jj_begin + 1, Buffer_send)
      ELSE IF (ij==ip1jm) THEN
        ALLOCATE(Buffer_send(iip1 * ll * (min(jj_end, jjm) - jj_begin + 1)))
        CALL Pack_Data(Field(ij_begin, 1), ij, ll, min(jj_end, jjm) - jj_begin + 1, Buffer_send)
      ELSE
        WRITE(lunout, *)ij
        CALL abort_gcm("parallel_lmdz", "error in Gather_Field", 1)
      ENDIF

      IF (MPI_Rank==rank) THEN
        ALLOCATE(Buffer_Recv(ij * ll))

        !CDIR NOVECTOR
        DO i = 0, MPI_Size - 1

          IF (ij==ip1jmp1) THEN
            Recv_count(i) = (jj_end_para(i) - jj_begin_para(i) + 1) * ll * iip1
          ELSE IF (ij==ip1jm) THEN
            Recv_count(i) = (min(jj_end_para(i), jjm) - jj_begin_para(i) + 1) * ll * iip1
          ELSE
            CALL abort_gcm("parallel_lmdz", "error in Gather_Field", 1)
          ENDIF

          IF (i==0) THEN
            displ(i) = 0
          ELSE
            displ(i) = displ(i - 1) + Recv_count(i - 1)
          ENDIF

        ENDDO

      ELSE
        ! Ehouarn: When in debug mode, ifort complains (for CALL MPI_GATHERV
        ! below) about Buffer_Recv() being not allocated.
        ! So make a dummy allocation.
        ALLOCATE(Buffer_Recv(1))
      ENDIF ! of if (MPI_Rank==rank)

      !$OMP CRITICAL (MPI)
      CALL MPI_GATHERV(Buffer_send, (min(ij_end, ij) - ij_begin + 1) * ll, MPI_REAL8, &
              Buffer_Recv, Recv_count, displ, MPI_REAL8, rank, COMM_LMDZ, ierr)
      !$OMP END CRITICAL (MPI)

      IF (MPI_Rank==rank) THEN
        IF (ij==ip1jmp1) THEN
          DO i = 0, MPI_Size - 1
            CALL Unpack_Data(Field((jj_begin_para(i) - 1) * iip1 + 1, 1), ij, ll, &
                    jj_end_para(i) - jj_begin_para(i) + 1, Buffer_Recv(displ(i) + 1))
          ENDDO
        ELSE IF (ij==ip1jm) THEN
          DO i = 0, MPI_Size - 1
            CALL Unpack_Data(Field((jj_begin_para(i) - 1) * iip1 + 1, 1), ij, ll, &
                    min(jj_end_para(i), jjm) - jj_begin_para(i) + 1, Buffer_Recv(displ(i) + 1))
          ENDDO
        ENDIF
      ENDIF
    ENDIF ! using_mpi

  END SUBROUTINE Gather_Field

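  ! AllGather_Field: gathers the distributed field on process 0 and then
  ! broadcasts the full field to every process.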
  SUBROUTINE AllGather_Field(Field, ij, ll)
    USE lmdz_mpi
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER :: ij, ll
    REAL, DIMENSION(ij, ll) :: Field
    INTEGER :: ierr

    IF (using_mpi) THEN
      CALL Gather_Field(Field, ij, ll, 0)
      !$OMP CRITICAL (MPI)
      CALL MPI_BCAST(Field, ij * ll, MPI_REAL8, 0, COMM_LMDZ, ierr)
      !$OMP END CRITICAL (MPI)
    ENDIF

  END SUBROUTINE AllGather_Field

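  ! Broadcast_Field: broadcasts a full field from process 'rank' to all other
  ! processes of COMM_LMDZ.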
  SUBROUTINE Broadcast_Field(Field, ij, ll, rank)
    USE lmdz_mpi
    USE lmdz_dimensions, ONLY: iim, jjm, llm, ndm
    USE lmdz_paramet
    IMPLICIT NONE

    INTEGER :: ij, ll
    REAL, DIMENSION(ij, ll) :: Field
    INTEGER :: rank
    INTEGER :: ierr

    IF (using_mpi) THEN

      !$OMP CRITICAL (MPI)
      CALL MPI_BCAST(Field, ij * ll, MPI_REAL8, rank, COMM_LMDZ, ierr)
      !$OMP END CRITICAL (MPI)

    ENDIF
  END SUBROUTINE Broadcast_Field

END MODULE parallel_lmdz
---|