! $Id$

MODULE parallel_lmdz
  ! Latitude-band MPI decomposition of the LMDZ dynamics grid.
  ! The jjm+1 latitude rows are split into contiguous bands, one per MPI
  ! process; *_u bounds refer to the scalar/u grid (jjp1 rows), *_v bounds
  ! to the v grid (jjm rows). OpenMP threads additionally share the
  ! vertical levels (see omp_chunk in init_parallel).
  USE mod_const_mpi
  USE lmdz_mpi, ONLY: using_mpi
  USE IOIPSL
  ! Maximum halo depth, in latitude rows, exchanged with neighbour processes
  INTEGER, PARAMETER :: halo_max = 3

  LOGICAL, SAVE :: using_omp ! .TRUE. if using OpenMP
  LOGICAL, SAVE :: is_master ! .TRUE. if the core is both MPI & OpenMP master
  !$OMP THREADPRIVATE(is_master)

  ! MPI topology of this run
  INTEGER, save :: mpi_size          ! number of MPI processes
  INTEGER, save :: mpi_rank          ! rank of this process (0-based)
  ! Latitude band owned by this process (1-based rows, halo excluded)
  INTEGER, save :: jj_begin
  INTEGER, save :: jj_end
  INTEGER, save :: jj_nb
  ! Same band as flattened (iip1 points per row) indices
  INTEGER, save :: ij_begin
  INTEGER, save :: ij_end
  logical, save :: pole_nord         ! .TRUE. on rank 0 (holds the north pole)
  logical, save :: pole_sud          ! .TRUE. on the last rank (south pole)

  ! Halo-extended latitude bounds on the u/scalar grid, clipped to [1,jjp1]
  INTEGER, save :: jjb_u
  INTEGER, save :: jje_u
  INTEGER, save :: jjnb_u
  ! Halo-extended latitude bounds on the v grid, clipped to [1,jjm]
  INTEGER, save :: jjb_v
  INTEGER, save :: jje_v
  INTEGER, save :: jjnb_v

  ! Halo-extended flattened bounds, u/scalar grid
  INTEGER, save :: ijb_u
  INTEGER, save :: ije_u
  INTEGER, save :: ijnb_u

  ! Halo-extended flattened bounds, v grid
  INTEGER, save :: ijb_v
  INTEGER, save :: ije_v
  INTEGER, save :: ijnb_v

  ! Band description for every process, indexed 0..mpi_size-1
  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_begin_para
  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_end_para
  INTEGER, ALLOCATABLE, SAVE, DIMENSION(:) :: jj_nb_para
  INTEGER, save :: OMP_CHUNK         ! OpenMP chunk size for level loops (run parameter 'omp_chunk')
  INTEGER, save :: omp_rank          ! OpenMP thread number (0-based)
  INTEGER, save :: omp_size          ! number of OpenMP threads
  !$OMP THREADPRIVATE(omp_rank)

  ! Snapshot of a complete domain decomposition; mirrors the module
  ! variables above so decompositions can be swapped via Set_Distrib.
  TYPE distrib
    ! Band owned by the process (rows, then flattened indices)
    INTEGER :: jj_begin
    INTEGER :: jj_end
    INTEGER :: jj_nb
    INTEGER :: ij_begin
    INTEGER :: ij_end

    ! Halo-extended latitude bounds (u then v grid)
    INTEGER :: jjb_u
    INTEGER :: jje_u
    INTEGER :: jjnb_u
    INTEGER :: jjb_v
    INTEGER :: jje_v
    INTEGER :: jjnb_v

    ! Halo-extended flattened bounds, u grid
    INTEGER :: ijb_u
    INTEGER :: ije_u
    INTEGER :: ijnb_u

    ! Halo-extended flattened bounds, v grid
    INTEGER :: ijb_v
    INTEGER :: ije_v
    INTEGER :: ijnb_v

    ! Per-process tables (0:mpi_size-1); pointer components so the defined
    ! assignment below can deep-copy them explicitly
    INTEGER, pointer :: jj_begin_para(:) => NULL()
    INTEGER, pointer :: jj_end_para(:) => NULL()
    INTEGER, pointer :: jj_nb_para(:) => NULL()
  END TYPE distrib

  ! Defined assignment: component-wise deep copy of a distrib
  INTERFACE ASSIGNMENT (=)
    MODULE PROCEDURE copy_distrib
  END INTERFACE
  TYPE(distrib), SAVE :: current_dist  ! decomposition currently in effect

CONTAINS
---|
79 | |
---|
SUBROUTINE init_parallel
  ! Initialize the MPI + OpenMP environment:
  !  - query MPI size/rank (1 / 0 in serial mode),
  !  - redirect text output to a per-rank file when lunout is not stdout,
  !  - split the jjm+1 latitude rows into mpi_size contiguous bands,
  !  - derive this process' band and halo-extended bounds,
  !  - query OpenMP size/rank and the 'omp_chunk' run parameter,
  !  - build current_dist and set is_master.
  USE lmdz_vampir
  USE lmdz_mpi
  USE lmdz_iniprint, ONLY: lunout, prt_level
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"

  INTEGER :: ierr
  INTEGER :: i, j
  ! NOTE(review): type_size, blocklen, type and comp_id appear unused in
  ! this routine — possibly leftovers from an MPI derived-type setup.
  INTEGER :: type_size
  INTEGER, DIMENSION(3) :: blocklen, type
  INTEGER :: comp_id
  CHARACTER(LEN = 4) :: num
  CHARACTER(LEN = 20) :: filename

#ifdef CPP_OMP
  INTEGER :: OMP_GET_NUM_THREADS
  EXTERNAL OMP_GET_NUM_THREADS
  INTEGER :: OMP_GET_THREAD_NUM
  EXTERNAL OMP_GET_THREAD_NUM
#endif

  ! using_omp reflects the build options, not the runtime thread count
#ifdef CPP_OMP
  using_OMP=.TRUE.
#else
  using_OMP = .FALSE.
#endif

  CALL InitVampir

  IF (using_mpi) THEN
    CALL MPI_COMM_SIZE(COMM_LMDZ, mpi_size, ierr)
    CALL MPI_COMM_RANK(COMM_LMDZ, mpi_rank, ierr)
  ELSE
    mpi_size = 1
    mpi_rank = 0
  ENDIF

  ! Open text output file with mpi_rank in suffix of file name
  ! (rank 0 keeps writing to the unit opened by the caller)
  IF (lunout /= 5 .AND. lunout /= 6) THEN
    WRITE(num, '(I4.4)') mpi_rank
    filename = 'lmdz.out_' // num
    IF (mpi_rank /= 0) THEN
      OPEN(UNIT = lunout, FILE = TRIM(filename), ACTION = 'write', &
              STATUS = 'unknown', FORM = 'formatted', IOSTAT = ierr)
    ENDIF
  ENDIF

  allocate(jj_begin_para(0:mpi_size - 1))
  allocate(jj_end_para(0:mpi_size - 1))
  allocate(jj_nb_para(0:mpi_size - 1))

  ! Distribute the jjm+1 rows as evenly as possible; the first
  ! MOD(jjm+1, mpi_size) processes each get one extra row.
  do i = 0, mpi_size - 1
    jj_nb_para(i) = (jjm + 1) / mpi_size
    IF (i < MOD((jjm + 1), mpi_size)) jj_nb_para(i) = jj_nb_para(i) + 1

    ! Every process needs at least 2 latitude rows
    IF (jj_nb_para(i) <= 1) THEN
      WRITE(lunout, *)"Arret : le nombre de bande de lattitude par process est trop faible (<2)."
      WRITE(lunout, *)" ---> diminuez le nombre de CPU ou augmentez la taille en lattitude"

      IF (using_mpi) CALL MPI_ABORT(COMM_LMDZ, -1, ierr)

    endif

  enddo

  ! jj_nb_para(0)=11
  ! jj_nb_para(1)=25
  ! jj_nb_para(2)=25
  ! jj_nb_para(3)=12

  ! Stack the bands: process i owns rows jj_begin_para(i)..jj_end_para(i)
  j = 1

  do i = 0, mpi_size - 1

    jj_begin_para(i) = j
    jj_end_para(i) = j + jj_Nb_para(i) - 1
    j = j + jj_Nb_para(i)

  enddo

  jj_begin = jj_begin_para(mpi_rank)
  jj_end = jj_end_para(mpi_rank)
  jj_nb = jj_nb_para(mpi_rank)

  ! Flattened index range of the local band (iip1 points per row)
  ij_begin = (jj_begin - 1) * iip1 + 1
  ij_end = jj_end * iip1

  ! First/last ranks hold the poles
  IF (mpi_rank==0) THEN
    pole_nord = .TRUE.
  else
    pole_nord = .FALSE.
  endif

  IF (mpi_rank==mpi_size - 1) THEN
    pole_sud = .TRUE.
  else
    pole_sud = .FALSE.
  endif

  WRITE(lunout, *)"init_parallel: jj_begin", jj_begin
  WRITE(lunout, *)"init_parallel: jj_end", jj_end
  WRITE(lunout, *)"init_parallel: ij_begin", ij_begin
  WRITE(lunout, *)"init_parallel: ij_end", ij_end

  ! Halo-extended bounds, clipped to the global grids
  ! (u/scalar grid has jjp1 rows, v grid has jjm rows)
  jjb_u = MAX(jj_begin - halo_max, 1)
  jje_u = MIN(jj_end + halo_max, jjp1)
  jjnb_u = jje_u - jjb_u + 1

  jjb_v = MAX(jj_begin - halo_max, 1)
  jje_v = MIN(jj_end + halo_max, jjm)
  jjnb_v = jje_v - jjb_v + 1

  ijb_u = MAX(ij_begin - halo_max * iip1, 1)
  ije_u = MIN(ij_end + halo_max * iip1, ip1jmp1)
  ijnb_u = ije_u - ijb_u + 1

  ijb_v = MAX(ij_begin - halo_max * iip1, 1)
  ije_v = MIN(ij_end + halo_max * iip1, ip1jm)
  ijnb_v = ije_v - ijb_v + 1

  ! Query the OpenMP layout; omp_rank is THREADPRIVATE so every thread
  ! records its own number here.
  !$OMP PARALLEL

#ifdef CPP_OMP
  !$OMP MASTER
  omp_size=OMP_GET_NUM_THREADS()
  !$OMP END MASTER
  !$OMP BARRIER
  omp_rank=OMP_GET_THREAD_NUM()

  !Config Key = omp_chunk
  !Config Desc = taille des blocs openmp
  !Config Def = 1
  !Config Help = defini la taille des packets d'itération openmp
  !Config distribue a chaque tache lors de l'entree dans une
  !Config boucle parallelisee

  !$OMP MASTER
  omp_chunk=(llm+1)/omp_size
  IF (MOD(llm+1,omp_size)/=0) omp_chunk=omp_chunk+1
  CALL getin('omp_chunk',omp_chunk)
  !$OMP END MASTER
  !$OMP BARRIER
#else
  omp_size = 1
  omp_rank = 0
#endif
  !$OMP END PARALLEL
  CALL create_distrib(jj_nb_para, current_dist)

  ! NOTE(review): is_master is THREADPRIVATE but is assigned here outside
  ! the parallel region, i.e. by the master thread only — confirm that
  ! worker threads never read it before setting it.
  IF ((mpi_rank==0).AND.(omp_rank==0)) THEN
    is_master = .TRUE.
  ELSE
    is_master = .FALSE.
  ENDIF

END SUBROUTINE init_parallel
---|
238 | |
---|
SUBROUTINE create_distrib(jj_nb_new, d)
  ! Build the decomposition descriptor d from the per-process band sizes
  ! jj_nb_new(0:mpi_size-1): stack the bands from row 1 upward, then derive
  ! this process' band and its halo-extended bounds (clipped to the grids).
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"

  INTEGER, INTENT(IN) :: jj_nb_new(0:MPI_Size - 1)
  TYPE(distrib), INTENT(INOUT) :: d
  INTEGER :: rank, row

  ! Allocate the per-process tables on first use
  IF (.NOT. ASSOCIATED(d%jj_nb_para)) ALLOCATE(d%jj_nb_para(0:MPI_Size - 1))
  IF (.NOT. ASSOCIATED(d%jj_begin_para)) ALLOCATE(d%jj_begin_para(0:MPI_Size - 1))
  IF (.NOT. ASSOCIATED(d%jj_end_para)) ALLOCATE(d%jj_end_para(0:MPI_Size - 1))

  d%jj_nb_para(:) = jj_nb_new(:)

  ! Stack the bands: process 0 starts at row 1, each band follows the last
  row = 1
  DO rank = 0, mpi_size - 1
    d%jj_begin_para(rank) = row
    d%jj_end_para(rank) = row + d%jj_nb_para(rank) - 1
    row = row + d%jj_nb_para(rank)
  END DO

  ! Band owned by this process, as rows and as flattened indices
  d%jj_begin = d%jj_begin_para(mpi_rank)
  d%jj_end = d%jj_end_para(mpi_rank)
  d%jj_nb = d%jj_nb_para(mpi_rank)

  d%ij_begin = (d%jj_begin - 1) * iip1 + 1
  d%ij_end = d%jj_end * iip1

  ! Halo-extended latitude bounds (u grid: jjp1 rows, v grid: jjm rows)
  d%jjb_u = MAX(d%jj_begin - halo_max, 1)
  d%jje_u = MIN(d%jj_end + halo_max, jjp1)
  d%jjnb_u = d%jje_u - d%jjb_u + 1

  d%jjb_v = MAX(d%jj_begin - halo_max, 1)
  d%jje_v = MIN(d%jj_end + halo_max, jjm)
  d%jjnb_v = d%jje_v - d%jjb_v + 1

  ! Same bounds as flattened indices
  d%ijb_u = MAX(d%ij_begin - halo_max * iip1, 1)
  d%ije_u = MIN(d%ij_end + halo_max * iip1, ip1jmp1)
  d%ijnb_u = d%ije_u - d%ijb_u + 1

  d%ijb_v = MAX(d%ij_begin - halo_max * iip1, 1)
  d%ije_v = MIN(d%ij_end + halo_max * iip1, ip1jm)
  d%ijnb_v = d%ije_v - d%ijb_v + 1

END SUBROUTINE create_distrib
---|
288 | |
---|
289 | |
---|
SUBROUTINE Set_Distrib(d)
  ! Make d the active decomposition: mirror every component into the
  ! module-level variables and remember it in current_dist.
  IMPLICIT NONE

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  TYPE(distrib), INTENT(IN) :: d

  ! Local band, as rows and as flattened indices
  jj_begin = d%jj_begin ;  jj_end = d%jj_end ;  jj_nb = d%jj_nb
  ij_begin = d%ij_begin ;  ij_end = d%ij_end

  ! Halo-extended latitude bounds (u then v grid)
  jjb_u = d%jjb_u ;  jje_u = d%jje_u ;  jjnb_u = d%jjnb_u
  jjb_v = d%jjb_v ;  jje_v = d%jje_v ;  jjnb_v = d%jjnb_v

  ! Halo-extended flattened bounds (u then v grid)
  ijb_u = d%ijb_u ;  ije_u = d%ije_u ;  ijnb_u = d%ijnb_u
  ijb_v = d%ijb_v ;  ije_v = d%ije_v ;  ijnb_v = d%ijnb_v

  ! Per-process tables
  jj_begin_para(:) = d%jj_begin_para(:)
  jj_end_para(:) = d%jj_end_para(:)
  jj_nb_para(:) = d%jj_nb_para(:)

  current_dist = d

END SUBROUTINE Set_Distrib
---|
324 | |
---|
SUBROUTINE copy_distrib(dist, new_dist)
  ! Defined assignment (=) for TYPE(distrib): component-wise deep copy,
  ! including the per-process tables.
  ! Fix: allocate dist's pointer tables when they are not yet associated.
  ! The previous version assigned through possibly-null pointers, which is
  ! invalid for a freshly declared distrib (e.g. the INTENT(OUT) argument
  ! of get_current_distrib) that never went through create_distrib.
  IMPLICIT NONE

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  TYPE(distrib), INTENT(INOUT) :: dist
  TYPE(distrib), INTENT(IN) :: new_dist

  dist%jj_begin = new_dist%jj_begin
  dist%jj_end = new_dist%jj_end
  dist%jj_nb = new_dist%jj_nb
  dist%ij_begin = new_dist%ij_begin
  dist%ij_end = new_dist%ij_end

  dist%jjb_u = new_dist%jjb_u
  dist%jje_u = new_dist%jje_u
  dist%jjnb_u = new_dist%jjnb_u
  dist%jjb_v = new_dist%jjb_v
  dist%jje_v = new_dist%jje_v
  dist%jjnb_v = new_dist%jjnb_v

  dist%ijb_u = new_dist%ijb_u
  dist%ije_u = new_dist%ije_u
  dist%ijnb_u = new_dist%ijnb_u

  dist%ijb_v = new_dist%ijb_v
  dist%ije_v = new_dist%ije_v
  dist%ijnb_v = new_dist%ijnb_v

  ! Deep copy of the tables (0-based, one entry per MPI process);
  ! destination tables are allocated on first use.
  IF (ASSOCIATED(new_dist%jj_begin_para)) THEN
    IF (.NOT. ASSOCIATED(dist%jj_begin_para)) &
            ALLOCATE(dist%jj_begin_para(0:SIZE(new_dist%jj_begin_para) - 1))
    dist%jj_begin_para(:) = new_dist%jj_begin_para(:)
  ENDIF

  IF (ASSOCIATED(new_dist%jj_end_para)) THEN
    IF (.NOT. ASSOCIATED(dist%jj_end_para)) &
            ALLOCATE(dist%jj_end_para(0:SIZE(new_dist%jj_end_para) - 1))
    dist%jj_end_para(:) = new_dist%jj_end_para(:)
  ENDIF

  IF (ASSOCIATED(new_dist%jj_nb_para)) THEN
    IF (.NOT. ASSOCIATED(dist%jj_nb_para)) &
            ALLOCATE(dist%jj_nb_para(0:SIZE(new_dist%jj_nb_para) - 1))
    dist%jj_nb_para(:) = new_dist%jj_nb_para(:)
  ENDIF

END SUBROUTINE copy_distrib
---|
359 | |
---|
360 | |
---|
SUBROUTINE get_current_distrib(d)
  ! Return the decomposition currently in effect (deep copy through the
  ! defined assignment, i.e. copy_distrib).
  IMPLICIT NONE

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  TYPE(distrib), INTENT(OUT) :: d

  ! NOTE(review): the defined assignment copies into d's pointer tables;
  ! verify copy_distrib handles a fresh d whose tables are still null.
  d = current_dist

END SUBROUTINE get_current_distrib
---|
371 | |
---|
SUBROUTINE Finalize_parallel
  ! Tear down the parallel environment: free the decomposition tables,
  ! shut down XIOS when active, terminate the OASIS coupler in coupled
  ! runs (the coupler then owns MPI finalization), and otherwise call
  ! MPI_FINALIZE directly.
  USE lmdz_mpi
  ! ug: for the XIOS outputs
  USE lmdz_wxios

#ifdef CPP_COUPLE
  ! Use of Oasis-MCT coupler
#if defined CPP_OMCT
  USE mod_prism
#else
  USE mod_prism_proto
#endif
  ! Ehouarn: surface_data module is in 'phylmd' ...
  USE surface_data, ONLY: type_ocean
  IMPLICIT NONE
#else
  IMPLICIT NONE
  ! without the surface_data module, we declare (and set) a dummy 'type_ocean'
  CHARACTER(LEN = 6), parameter :: type_ocean = "dummy"
#endif

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"

  INTEGER :: ierr
  INTEGER :: i

  IF (allocated(jj_begin_para)) deallocate(jj_begin_para)
  IF (allocated(jj_end_para)) deallocate(jj_end_para)
  IF (allocated(jj_nb_para)) deallocate(jj_nb_para)

  IF (type_ocean == 'couple') THEN
#ifdef CPP_COUPLE
    ! Coupled run: close XIOS first (if used), then let OASIS terminate
    IF (using_xios) THEN
      ! Proper shutdown of XIOS
      CALL wxios_close()
      CALL prism_terminate_proto(ierr)
      IF (ierr .NE. PRISM_Ok) THEN
        CALL abort_gcm('Finalize_parallel',' Probleme dans prism_terminate_proto ',1)
      ENDIF
    ELSE
      CALL prism_terminate_proto(ierr)
      IF (ierr .NE. PRISM_Ok) THEN
        CALL abort_gcm('Finalize_parallel',' Probleme dans prism_terminate_proto ',1)
      endif
    ENDIF
#else
    ! A coupled configuration requires the CPP_COUPLE build
    CALL abort_gcm('Finalize_parallel', 'type_ocean = couple but CPP_COUPLE not defined', 1)
#endif
  else
    IF (using_xios) THEN
      ! Proper shutdown of XIOS
      CALL wxios_close()
    ENDIF
    IF (using_mpi) CALL MPI_FINALIZE(ierr)
  end if

END SUBROUTINE Finalize_parallel
---|
431 | |
---|
SUBROUTINE Pack_Data(Field, ij, ll, row, Buffer)
  ! Copy 'row' latitude rows (row*iip1 points) of each of the ll levels of
  ! Field into the contiguous Buffer, level after level — same layout as a
  ! column-major copy of Field(1:row*iip1, 1:ll).
  IMPLICIT NONE

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"

  INTEGER, INTENT(IN) :: ij, ll, row
  REAL, DIMENSION(ij, ll), INTENT(IN) :: Field
  REAL, DIMENSION(ll * iip1 * row), INTENT(OUT) :: Buffer

  INTEGER :: offset, npts
  INTEGER :: l

  npts = row * iip1
  offset = 0
  DO l = 1, ll
    ! One contiguous slab per level
    Buffer(offset + 1:offset + npts) = Field(1:npts, l)
    offset = offset + npts
  END DO

END SUBROUTINE Pack_Data
---|
454 | |
---|
SUBROUTINE Unpack_Data(Field, ij, ll, row, Buffer)
  ! Inverse of Pack_Data: scatter the contiguous Buffer back into the
  ! first row*iip1 points of each of the ll levels of Field.
  IMPLICIT NONE

  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"

  INTEGER, INTENT(IN) :: ij, ll, row
  REAL, DIMENSION(ij, ll), INTENT(OUT) :: Field
  REAL, DIMENSION(ll * iip1 * row), INTENT(IN) :: Buffer

  INTEGER :: offset, npts
  INTEGER :: l

  npts = row * iip1
  offset = 0
  DO l = 1, ll
    ! One contiguous slab per level
    Field(1:npts, l) = Buffer(offset + 1:offset + npts)
    offset = offset + npts
  END DO

END SUBROUTINE Unpack_Data
---|
478 | |
---|
479 | |
---|
SUBROUTINE barrier
  ! MPI barrier on COMM_LMDZ; no-op in serial runs. The CRITICAL(MPI)
  ! section serializes MPI calls across OpenMP threads.
  USE lmdz_mpi
  IMPLICIT NONE
  INTEGER :: ierr

  !$OMP CRITICAL (MPI)
  IF (using_mpi) CALL MPI_Barrier(COMM_LMDZ, ierr)
  !$OMP END CRITICAL (MPI)

END SUBROUTINE barrier
---|
490 | |
---|
491 | |
---|
SUBROUTINE exchange_hallo(Field, ij, ll, up, down)
  ! Exchange latitude halos with the two neighbouring MPI processes using
  ! non-blocking sends/receives, then wait for all of them.
  !   up   = halo depth (rows) ABOVE the local band: received from rank-1
  !          into Field(ij_begin-up*iip1...); symmetrically each process
  !          sends its LAST 'up' rows down to rank+1.
  !   down = halo depth (rows) BELOW the local band: received from rank+1
  !          into Field(ij_end+1...); each process sends its FIRST 'down'
  !          rows up to rank-1.
  ! Polar processes skip the transfers across the poles; a zero depth
  ! disables the corresponding transfer pair everywhere.
  USE lmdz_mpi
  USE lmdz_vampir
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  INTEGER :: ij, ll
  REAL, DIMENSION(ij, ll) :: Field
  INTEGER :: up, down

  INTEGER :: ierr
  LOGICAL :: SendUp, SendDown
  LOGICAL :: RecvUp, RecvDown
  INTEGER, DIMENSION(4) :: Request
  INTEGER, DIMENSION(MPI_STATUS_SIZE, 4) :: Status

  INTEGER :: NbRequest
  REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Send_up, Buffer_Send_down
  REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Recv_up, Buffer_Recv_down
  INTEGER :: Buffer_size

  IF (using_mpi) THEN

    CALL barrier

    CALL VTb(VThallo)

    SendUp = .TRUE.
    SendDown = .TRUE.
    RecvUp = .TRUE.
    RecvDown = .TRUE.

    ! No neighbour beyond the poles
    IF (pole_nord) THEN
      SendUp = .FALSE.
      RecvUp = .FALSE.
    ENDIF

    IF (pole_sud) THEN
      SendDown = .FALSE.
      RecvDown = .FALSE.
    ENDIF

    ! up==0: nobody needs an upper halo -> no RecvUp, and no SendDown
    ! (what rank+1 would receive as its upper halo)
    IF (up==0) THEN
      SendDown = .FALSE.
      RecvUp = .FALSE.
    endif

    ! down==0: nobody needs a lower halo -> no RecvDown, no SendUp
    IF (down==0) THEN
      SendUp = .FALSE.
      RecvDown = .FALSE.
    endif

    NbRequest = 0

    ! Send our first 'down' rows to the northern neighbour (rank-1),
    ! which stores them as its lower halo
    IF (SendUp) THEN
      NbRequest = NbRequest + 1
      buffer_size = down * iip1 * ll
      allocate(Buffer_Send_up(Buffer_size))
      CALL PACK_Data(Field(ij_begin, 1), ij, ll, down, Buffer_Send_up)
      !$OMP CRITICAL (MPI)
      CALL MPI_ISEND(Buffer_send_up, Buffer_Size, MPI_REAL8, MPI_Rank - 1, 1, &
              COMM_LMDZ, Request(NbRequest), ierr)
      !$OMP END CRITICAL (MPI)
    ENDIF

    ! Send our last 'up' rows to the southern neighbour (rank+1),
    ! which stores them as its upper halo
    IF (SendDown) THEN
      NbRequest = NbRequest + 1

      buffer_size = up * iip1 * ll
      allocate(Buffer_Send_down(Buffer_size))
      CALL PACK_Data(Field(ij_end + 1 - up * iip1, 1), ij, ll, up, Buffer_send_down)

      !$OMP CRITICAL (MPI)
      CALL MPI_ISEND(Buffer_send_down, Buffer_Size, MPI_REAL8, MPI_Rank + 1, 1, &
              COMM_LMDZ, Request(NbRequest), ierr)
      !$OMP END CRITICAL (MPI)
    ENDIF

    ! Receive our upper halo ('up' rows) from rank-1
    IF (RecvUp) THEN
      NbRequest = NbRequest + 1
      buffer_size = up * iip1 * ll
      allocate(Buffer_recv_up(Buffer_size))

      !$OMP CRITICAL (MPI)
      CALL MPI_IRECV(Buffer_recv_up, Buffer_size, MPI_REAL8, MPI_Rank - 1, 1, &
              COMM_LMDZ, Request(NbRequest), ierr)
      !$OMP END CRITICAL (MPI)

    ENDIF

    ! Receive our lower halo ('down' rows) from rank+1
    IF (RecvDown) THEN
      NbRequest = NbRequest + 1
      buffer_size = down * iip1 * ll
      allocate(Buffer_recv_down(Buffer_size))

      !$OMP CRITICAL (MPI)
      CALL MPI_IRECV(Buffer_recv_down, Buffer_size, MPI_REAL8, MPI_Rank + 1, 1, &
              COMM_LMDZ, Request(NbRequest), ierr)
      !$OMP END CRITICAL (MPI)

    ENDIF

    ! Complete all transfers, then copy the halos into Field
    IF (NbRequest > 0) CALL MPI_WAITALL(NbRequest, Request, Status, ierr)
    IF (RecvUp) CALL Unpack_Data(Field(ij_begin - up * iip1, 1), ij, ll, up, Buffer_Recv_up)
    IF (RecvDown) CALL Unpack_Data(Field(ij_end + 1, 1), ij, ll, down, Buffer_Recv_down)

    CALL VTe(VThallo)
    CALL barrier

  ENDIF ! using_mpi

END SUBROUTINE exchange_Hallo
---|
604 | |
---|
605 | |
---|
SUBROUTINE Gather_Field(Field, ij, ll, rank)
  ! Gather a latitude-band-distributed field onto MPI process 'rank' via
  ! MPI_GATHERV. ij selects the grid: ip1jmp1 (scalar/u grid) or ip1jm
  ! (v grid, one row fewer — the last band is clipped to jjm rows);
  ! any other value aborts the run. On 'rank' the assembled field
  ! overwrites Field in place; elsewhere Field is untouched.
  ! No-op in a serial run.
  USE lmdz_mpi
  USE lmdz_iniprint, ONLY: lunout, prt_level
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  INTEGER :: ij, ll, rank
  REAL, DIMENSION(ij, ll) :: Field
  REAL, DIMENSION(:), ALLOCATABLE :: Buffer_send
  REAL, DIMENSION(:), ALLOCATABLE :: Buffer_Recv
  INTEGER, DIMENSION(0:MPI_Size - 1) :: Recv_count, displ
  INTEGER :: ierr
  INTEGER :: i

  IF (using_mpi) THEN

    ! Pack the local band into a contiguous send buffer
    IF (ij==ip1jmp1) THEN
      allocate(Buffer_send(iip1 * ll * (jj_end - jj_begin + 1)))
      CALL Pack_Data(Field(ij_begin, 1), ij, ll, jj_end - jj_begin + 1, Buffer_send)
    ELSE IF (ij==ip1jm) THEN
      ! v grid: the southernmost band has one row fewer
      allocate(Buffer_send(iip1 * ll * (min(jj_end, jjm) - jj_begin + 1)))
      CALL Pack_Data(Field(ij_begin, 1), ij, ll, min(jj_end, jjm) - jj_begin + 1, Buffer_send)
    else
      WRITE(lunout, *)ij
      CALL abort_gcm("parallel_lmdz", "erreur dans Gather_Field", 1)
    endif

    IF (MPI_Rank==rank) THEN
      allocate(Buffer_Recv(ij * ll))

      ! Per-process receive counts and displacements for MPI_GATHERV
      !CDIR NOVECTOR
      do i = 0, MPI_Size - 1

        IF (ij==ip1jmp1) THEN
          Recv_count(i) = (jj_end_para(i) - jj_begin_para(i) + 1) * ll * iip1
        ELSE IF (ij==ip1jm) THEN
          Recv_count(i) = (min(jj_end_para(i), jjm) - jj_begin_para(i) + 1) * ll * iip1
        else
          CALL abort_gcm("parallel_lmdz", "erreur dans Gather_Field", 1)
        endif

        IF (i==0) THEN
          displ(i) = 0
        else
          displ(i) = displ(i - 1) + Recv_count(i - 1)
        endif

      enddo

    else
      ! Ehouarn: When in debug mode, ifort complains (for CALL MPI_GATHERV
      ! below) about Buffer_Recv() being not allocated.
      ! So make a dummy allocation.
      allocate(Buffer_Recv(1))
    endif ! of if (MPI_Rank==rank)

    !$OMP CRITICAL (MPI)
    CALL MPI_GATHERV(Buffer_send, (min(ij_end, ij) - ij_begin + 1) * ll, MPI_REAL8, &
            Buffer_Recv, Recv_count, displ, MPI_REAL8, rank, COMM_LMDZ, ierr)
    !$OMP END CRITICAL (MPI)

    ! On the target process, unpack every band back into the full field
    IF (MPI_Rank==rank) THEN
      IF (ij==ip1jmp1) THEN
        do i = 0, MPI_Size - 1
          CALL Unpack_Data(Field((jj_begin_para(i) - 1) * iip1 + 1, 1), ij, ll, &
                  jj_end_para(i) - jj_begin_para(i) + 1, Buffer_Recv(displ(i) + 1))
        enddo
      ELSE IF (ij==ip1jm) THEN
        do i = 0, MPI_Size - 1
          CALL Unpack_Data(Field((jj_begin_para(i) - 1) * iip1 + 1, 1), ij, ll, &
                  min(jj_end_para(i), jjm) - jj_begin_para(i) + 1, Buffer_Recv(displ(i) + 1))
        enddo
      endif
    endif
  ENDIF ! using_mpi

END SUBROUTINE Gather_Field
---|
683 | |
---|
684 | |
---|
SUBROUTINE AllGather_Field(Field, ij, ll)
  ! Assemble the distributed field on process 0, then broadcast the full
  ! field to every process. No-op in a serial run.
  USE lmdz_mpi
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  INTEGER :: ij, ll
  REAL, DIMENSION(ij, ll) :: Field
  INTEGER :: ierr

  IF (.NOT. using_mpi) RETURN

  CALL Gather_Field(Field, ij, ll, 0)
  !$OMP CRITICAL (MPI)
  CALL MPI_BCAST(Field, ij * ll, MPI_REAL8, 0, COMM_LMDZ, ierr)
  !$OMP END CRITICAL (MPI)

END SUBROUTINE AllGather_Field
---|
702 | |
---|
SUBROUTINE Broadcast_Field(Field, ij, ll, rank)
  ! Broadcast the full field from process 'rank' to all others.
  ! No-op in a serial run.
  USE lmdz_mpi
  IMPLICIT NONE
  INCLUDE "dimensions.h"
  INCLUDE "paramet.h"
  INTEGER :: ij, ll
  REAL, DIMENSION(ij, ll) :: Field
  INTEGER :: rank
  INTEGER :: ierr

  IF (.NOT. using_mpi) RETURN

  !$OMP CRITICAL (MPI)
  CALL MPI_BCAST(Field, ij * ll, MPI_REAL8, rank, COMM_LMDZ, ierr)
  !$OMP END CRITICAL (MPI)

END SUBROUTINE Broadcast_Field
---|
721 | |
---|
722 | |
---|
723 | ! Subroutine verif_hallo(Field,ij,ll,up,down) |
---|
724 | ! USE lmdz_mpi |
---|
725 | ! IMPLICIT NONE |
---|
726 | ! INCLUDE "dimensions.h" |
---|
727 | ! INCLUDE "paramet.h" |
---|
728 | |
---|
729 | ! INTEGER :: ij,ll |
---|
730 | ! REAL, DIMENSION(ij,ll) :: Field |
---|
731 | ! INTEGER :: up,down |
---|
732 | |
---|
733 | ! REAL,DIMENSION(ij,ll): NewField |
---|
734 | |
---|
735 | ! NewField=0 |
---|
736 | |
---|
737 | ! ijb=ij_begin |
---|
738 | ! ije=ij_end |
---|
739 | ! if (pole_nord) |
---|
740 | ! NewField(ij_be |
---|
741 | |
---|
742 | end MODULE parallel_lmdz |
---|