- Timestamp: Jun 30, 2023, 8:18:43 PM (17 months ago)
- Location: LMDZ6/trunk/libf
- Files: 2 added, 19 edited
LMDZ6/trunk/libf/dyn3dmem/abort_gcm.F
r4593 → r4600: in abort_gcm, the #ifdef CPP_MPI / #else / #endif branch around the abnormal-termination path is replaced by a runtime test. The call MPI_ABORT(COMM_LMDZ, 1, ierror_mpi), still protected by C$OMP CRITICAL (MPI_ABORT_GCM), is now executed only if (using_mpi); otherwise the code falls through to stop 1 in the new else branch.
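The same pattern recurs throughout this changeset: the CPP_MPI preprocessor guards and INCLUDE 'mpif.h' lines are removed, the new lmdz_mpi module is USEd instead, and the serial case is chosen at run time through the using_mpi flag. The sketch below illustrates that pattern; it is not the LMDZ source, and the routine and argument names (abort_model, ierr_code) are invented for the example. In LMDZ the USE statement would pull in lmdz_mpi rather than the standard mpi module.

subroutine abort_model(comm, using_mpi, ierr_code)
  use mpi                           ! LMDZ would do: USE lmdz_mpi
  implicit none
  integer, intent(in) :: comm       ! communicator (COMM_LMDZ in abort_gcm)
  logical, intent(in) :: using_mpi  ! .true. when running under MPI
  integer, intent(in) :: ierr_code  ! non-zero error code to propagate
  integer :: ierror_mpi

  if (using_mpi) then
    ! abort every rank attached to the communicator
    call MPI_ABORT(comm, ierr_code, ierror_mpi)
  else
    ! serial run: a plain stop is enough
    stop 1
  end if
end subroutine abort_model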
LMDZ6/trunk/libf/dyn3dmem/mod_const_mpi.F90
r4146 → r4600: Init_const_mpi and Init_mpi now USE lmdz_mpi; the #ifdef CPP_MPI / INCLUDE 'mpif.h' / #endif blocks are deleted. The assignment MPI_REAL_LMDZ=MPI_REAL8 is no longer wrapped in #ifdef CPP_MPI, and the serial-only #else branch of Init_mpi, which merely called wxios_init("LMDZ"), is removed.
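The lmdz_mpi module itself is one of the two files added by this changeset and is not shown in the diff, so the following is only a guess at its shape: a thin wrapper that re-exports the standard MPI symbols and publishes the using_mpi flag (a dummy twin module could provide the same names for MPI-free builds). Everything here apart from the names using_mpi and MPI_REAL_LMDZ is an assumption.

module lmdz_mpi_sketch
  use mpi                                     ! re-export MPI_REAL8, MPI_STATUS_SIZE, ...
  implicit none
  ! runtime flag tested by the rest of the code; a serial dummy module
  ! exposing the same interface would set it to .false.
  logical, parameter :: using_mpi = .true.
  integer, save :: MPI_REAL_LMDZ = MPI_REAL8  ! real kind used in LMDZ exchanges
end module lmdz_mpi_sketch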
LMDZ6/trunk/libf/dyn3dmem/mod_hallo.F90
r4469 → r4600: create_global_mpi_buffer, Register_Hallo, Register_Hallo_u, Register_Hallo_v, SendRequest, WaitRequest, WaitSendRequest and WaitRecvRequest now USE lmdz_mpi instead of including mpif.h under #ifdef CPP_MPI. The preprocessor guards around MPI_ALLOC_MEM, MPI_ISEND, MPI_IRECV and MPI_WAITALL are dropped, and the serial fall-back declarations disappear: the buffer size is declared with INTEGER(KIND=MPI_ADDRESS_KIND) instead of INTEGER(KIND=8), and the status arrays are always dimensioned with MPI_STATUS_SIZE rather than 1.
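For context, the sketch below shows the kind of non-blocking neighbour exchange that mod_hallo wraps, reduced to a single 1-D field with one halo cell on each side. It is illustrative only: names, tags and the data layout are not those of LMDZ.

subroutine exchange_halo_1d(field, n, comm)
  use mpi
  implicit none
  integer, intent(in) :: n, comm
  real(kind=8), intent(inout) :: field(0:n+1)   ! interior 1..n plus two halo cells
  integer :: rank, nprocs, ierr, nreq
  integer :: req(4)
  integer :: stat(MPI_STATUS_SIZE, 4)

  call MPI_COMM_RANK(comm, rank, ierr)
  call MPI_COMM_SIZE(comm, nprocs, ierr)
  nreq = 0

  if (rank > 0) then                  ! exchange with the previous rank
    nreq = nreq + 1
    call MPI_ISEND(field(1), 1, MPI_REAL8, rank-1, 1, comm, req(nreq), ierr)
    nreq = nreq + 1
    call MPI_IRECV(field(0), 1, MPI_REAL8, rank-1, 2, comm, req(nreq), ierr)
  end if
  if (rank < nprocs-1) then           ! exchange with the next rank
    nreq = nreq + 1
    call MPI_ISEND(field(n), 1, MPI_REAL8, rank+1, 2, comm, req(nreq), ierr)
    nreq = nreq + 1
    call MPI_IRECV(field(n+1), 1, MPI_REAL8, rank+1, 1, comm, req(nreq), ierr)
  end if

  ! wait until every posted send and receive has completed
  if (nreq > 0) call MPI_WAITALL(nreq, req, stat, ierr)
end subroutine exchange_halo_1d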
LMDZ6/trunk/libf/dyn3dmem/parallel_lmdz.F90
r4593 → r4600: the module now imports the flag with USE lmdz_mpi, ONLY: using_mpi, so the local LOGICAL,SAVE :: using_mpi and its #ifdef CPP_MPI initialisation are removed. init_parallel, Finalize_parallel, barrier, exchange_hallo, Gather_Field, AllGather_Field and Broadcast_Field all USE lmdz_mpi; the #ifdef CPP_MPI guards around MPI_COMM_SIZE/MPI_COMM_RANK, MPI_ABORT, MPI_FINALIZE, MPI_Barrier, MPI_ISEND/MPI_IRECV/MPI_WAITALL, MPI_GATHERV and MPI_BCAST are removed together with the mpif.h includes, and the serial-mode Status arrays dimensioned (1,4) give way to (MPI_STATUS_SIZE,4). The mpif.h include also disappears from the commented-out verif_hallo routine.
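Gather_Field collects the latitude bands owned by each rank onto one process with MPI_GATHERV. A minimal sketch of that pattern (names and the 1-D layout are illustrative, not the LMDZ interface):

subroutine gather_slices(local, nloc, global, counts, root, comm)
  use mpi
  implicit none
  integer, intent(in) :: nloc, root, comm
  real(kind=8), intent(in)  :: local(nloc)   ! this rank's slice
  integer, intent(in)       :: counts(:)     ! slice size of every rank
  real(kind=8), intent(out) :: global(*)     ! assembled field, significant on root only
  integer :: displs(size(counts))
  integer :: i, ierr

  ! displacements are the running sum of the slice sizes
  displs(1) = 0
  do i = 2, size(counts)
    displs(i) = displs(i-1) + counts(i-1)
  end do

  call MPI_GATHERV(local, nloc, MPI_REAL8,             &
                   global, counts, displs, MPI_REAL8,  &
                   root, comm, ierr)
end subroutine gather_slices

AllGather_Field then simply follows the gather with an MPI_BCAST of the assembled field from the root rank.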
LMDZ6/trunk/libf/dyn3dmem/times.F90
r4593 → r4600: allgather_timer and allgather_timer_average USE lmdz_mpi instead of including mpif.h, and the #ifdef CPP_MPI guards around the mpi_allgather calls that exchange the timer tables (timer_table, timer_table_sqr, timer_average, timer_delta, timer_iteration) are removed.
LMDZ6/trunk/libf/dynphy_lonlat/calfis_loc.F
r4464 → r4600: calfis_loc now USEs lmdz_mpi; the mpif.h include is dropped, the Status array is always dimensioned with MPI_STATUS_SIZE, and the #ifdef CPP_MPI guards around the MPI_ISEND/MPI_IRECV exchange of du_send/du_recv and dv_send/dv_recv with the neighbouring MPI ranks are removed.
LMDZ6/trunk/libf/dynphy_lonlat/phylmd/ce0l.F90
r4389 → r4600: ce0l USEs lmdz_mpi; the mpif.h include is dropped and the final MPI_FINALIZE is kept under the runtime test IF (using_mpi) only, without the #ifdef CPP_MPI guard.
LMDZ6/trunk/libf/phy_common/abort_physic.F90
r4460 → r4600: in both branches of abort_physic (ierr equal to 0 and ierr non-zero), the #ifdef CPP_MPI / #else / #endif construct is replaced by if (using_mpi) ... else ... endif: with MPI, MPI_ABORT(COMM_LMDZ_PHY, 0 or 1, ierror_mpi) is called inside the !$OMP CRITICAL (MPI_ABORT_PHYSIC) section; without it, the code executes stop 0 or stop 1.
LMDZ6/trunk/libf/phy_common/mod_phys_lmdz_mpi_data.F90
r4127 → r4600: the module USEs lmdz_mpi; init_phys_lmdz_mpi_data no longer includes mpif.h and no longer derives is_using_mpi from #ifdef CPP_MPI. When is_using_mpi is true, the MPI_REAL_LMDZ=MPI_REAL8 assignment, MPI_COMM_SIZE/MPI_COMM_RANK on COMM_LMDZ_PHY and the MPI_ALLGATHER of the per-rank point count nbp into distrib are now called without preprocessor guards.
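A sketch of that initialisation step, assuming the same shape as the routine above (the argument list and the allocatable distrib are simplifications, not the LMDZ interface):

subroutine init_mpi_data(nbp, comm, mpi_rank, mpi_size, distrib)
  use mpi
  implicit none
  integer, intent(in)  :: nbp          ! number of grid points owned by this rank
  integer, intent(in)  :: comm
  integer, intent(out) :: mpi_rank, mpi_size
  integer, allocatable, intent(out) :: distrib(:)
  integer :: ierr

  call MPI_COMM_SIZE(comm, mpi_size, ierr)
  call MPI_COMM_RANK(comm, mpi_rank, ierr)

  allocate(distrib(0:mpi_size-1))
  ! after the allgather, distrib(r) holds the nbp of rank r on every process
  call MPI_ALLGATHER(nbp, 1, MPI_INTEGER, distrib, 1, MPI_INTEGER, comm, ierr)
end subroutine init_mpi_data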
LMDZ6/trunk/libf/phy_common/mod_phys_lmdz_mpi_transfert.F90
r3465 → r4600: every generic transfer routine of the module (bcast_mpi_cgen/igen/rgen/lgen, scatter_mpi_igen/rgen/lgen, gather_mpi_igen/rgen/lgen, reduce_sum_mpi_igen/rgen and reduce_min_mpi_igen/rgen) now USEs lmdz_mpi instead of including mpif.h, and the #ifdef CPP_MPI guards around MPI_BCAST, MPI_SCATTERV, MPI_GATHERV and MPI_REDUCE are removed. The runtime early returns (IF (.not.is_using_mpi) RETURN) and the counts/displacements bookkeeping on the root rank are unchanged.
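The broadcast variants reduce to a single guarded MPI_BCAST. A minimal sketch of that pattern for a real buffer (names are illustrative; LMDZ broadcasts with the MPI_REAL_LMDZ kind rather than the hard-coded MPI_REAL8 used here):

subroutine bcast_real(var, nb, is_using_mpi, root, comm)
  use mpi
  implicit none
  integer, intent(in) :: nb, root, comm
  logical, intent(in) :: is_using_mpi
  real(kind=8), intent(inout) :: var(nb)   ! defined on root, overwritten elsewhere
  integer :: ierr

  if (.not. is_using_mpi) return           ! serial run: nothing to exchange

  call MPI_BCAST(var, nb, MPI_REAL8, root, comm, ierr)
end subroutine bcast_real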
LMDZ6/trunk/libf/phylmd/cpl_mod.F90
r4370 → r4600: the calving computation of cpl_mod USEs lmdz_mpi (and the config_ocean_skin_m USE statement is written in upper case for consistency); the mpif.h include is dropped, and the #ifdef CPP_MPI guards around the MPI_SEND/MPI_RECV exchange of boundary rows of tmp_calv with the neighbouring ranks and around the MPI_ALLREDUCE that sums the calving contributions over the nb_zone_calving zones are removed.
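The zone totals are a plain element-wise sum over all ranks, available everywhere afterwards. A sketch of that reduction (subroutine and argument names are invented; the LMDZ call uses MPI_REAL_LMDZ and COMM_LMDZ_PHY):

subroutine sum_calving(local_sum, global_sum, nb_zone, comm)
  use mpi
  implicit none
  integer, intent(in) :: nb_zone, comm
  real(kind=8), intent(in)  :: local_sum(nb_zone)   ! this rank's partial sums
  real(kind=8), intent(out) :: global_sum(nb_zone)  ! totals, identical on every rank
  integer :: ierr

  call MPI_ALLREDUCE(local_sum, global_sum, nb_zone, MPI_REAL8, MPI_SUM, comm, ierr)
end subroutine sum_calving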
LMDZ6/trunk/libf/phylmd/create_etat0_limit_unstruct.F90
r4460 → r4600: the two #ifdef CPP_MPI guards around the final CALL MPI_Finalize(ierr) (issued after xios_context_finalize and xios_finalize) are removed.
LMDZ6/trunk/libf/phylmd/mod_surf_para.F90
r2351 → r4600: Init_surf_para, gather_surf_mpi_i, gather_surf_mpi_r, scatter_surf_mpi_i and scatter_surf_mpi_r USE lmdz_mpi instead of including mpif.h, and the #ifdef CPP_MPI guards around MPI_ALLGather, MPI_Gatherv and MPI_Scatterv are removed; the IF (is_using_mpi) runtime tests are kept.
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_mod.F90
r4526 → r4600: Get_orchidee_communicator and the following helper routine USE lmdz_mpi; the mpif.h includes and the #ifdef CPP_MPI guards around MPI_COMM_SPLIT, MPI_COMM_SIZE and MPI_COMM_RANK are removed.
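Get_orchidee_communicator groups the ranks that actually hold land points into a dedicated communicator for ORCHIDEE. A sketch of that split (names are illustrative; the LMDZ version splits COMM_LMDZ_PHY and takes knon_mpi from mod_surf_para):

subroutine split_land_comm(knon_mpi, mpi_rank, comm, orch_comm, orch_size, orch_rank)
  use mpi
  implicit none
  integer, intent(in)  :: knon_mpi      ! number of land points on this rank
  integer, intent(in)  :: mpi_rank, comm
  integer, intent(out) :: orch_comm, orch_size, orch_rank
  integer :: color, ierr

  if (knon_mpi == 0) then
    color = 0      ! ranks with no land point
  else
    color = 1      ! ranks that will drive ORCHIDEE
  end if

  ! ranks sharing a color end up in the same new communicator;
  ! using mpi_rank as the key preserves the original ordering
  call MPI_COMM_SPLIT(comm, color, mpi_rank, orch_comm, ierr)
  call MPI_COMM_SIZE(orch_comm, orch_size, ierr)
  call MPI_COMM_RANK(orch_comm, orch_rank, ierr)
end subroutine split_land_comm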
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_nofrein_mod.F90
r3102 → r4600: same refactoring in the nofrein variant: Get_orchidee_communicator and the following helper routine USE lmdz_mpi, the mpif.h includes are dropped, and the #ifdef CPP_MPI guard around MPI_COMM_SPLIT is removed.
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_nolic_mod.F90
r4283 → r4600: same refactoring in the nolic variant: Get_orchidee_communicator and the following helper routine USE lmdz_mpi, the mpif.h includes are dropped, and the #ifdef CPP_MPI guards around MPI_COMM_SPLIT, MPI_COMM_SIZE and MPI_COMM_RANK are removed.
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_noopenmp_mod.F90
r2952 → r4600: in the noopenmp variant the choice between the two interfaces of ORCHIDEE's intersurf_main, previously made at build time (#ifndef CPP_MPI sequential call versus parallel call with offset and orch_comm), is now made at run time with IF (.NOT. using_mpi) ... ELSE ... ENDIF, for both the first call and the subsequent time-step call. The index, communicator and neighbours set-up routines (including Get_orchidee_communicator) USE lmdz_mpi instead of including mpif.h, the serial fall-back for the MPI_STATUS_SIZE status array disappears, and the #ifdef CPP_MPI guards around MPI_COMM_SIZE/MPI_COMM_RANK, MPI_SEND/MPI_RECV, MPI_COMM_SPLIT, MPI_GATHER, MPI_GATHERV and MPI_SCATTERV are removed.
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_nounstruct_mod.F90
r3438 → r4600: same refactoring in the nounstruct variant: Get_orchidee_communicator and the following helper routine USE lmdz_mpi, the mpif.h includes are dropped, and the #ifdef CPP_MPI guard around MPI_COMM_SPLIT is removed.
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_noz0h_mod.F90
r3568 → r4600: same refactoring in the noz0h variant: Get_orchidee_communicator and the following helper routine USE lmdz_mpi, the mpif.h includes are dropped, and the #ifdef CPP_MPI guard around MPI_COMM_SPLIT is removed.