- Timestamp: Jun 30, 2023, 8:18:43 PM (11 months ago)
- Files: 1 edited
Legend:
- Unmodified
- Added
- Removed
-
LMDZ6/trunk/libf/phylmd/surf_land_orchidee_noopenmp_mod.F90
r2952 r4600 421 421 IF (knon /=0) THEN 422 422 CALL Init_orchidee_index(knon,orch_comm,knindex,offset,ktindex) 423 424 #ifndef CPP_MPI 425 ! Interface for ORCHIDEE compiled in sequential mode(without preprocessing flag CPP_MPI)426 CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, knon, ktindex, dtime, &427 lrestart_read, lrestart_write, lalo, &428 contfrac, neighbours, resolution, date0, &429 zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, &430 cdrag, petA_orc, peqA_orc, petB_orc, peqB_orc, &431 precip_rain, precip_snow, lwdown, swnet, swdown, ps, &432 evap, fluxsens, fluxlat, coastalflow, riverflow, &433 tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, &434 lon_scat, lat_scat, q2m, t2m &423 424 IF (.NOT. using_mpi) THEN 425 ! Interface for ORCHIDEE compiled in sequential mode(without preprocessing flag CPP_MPI) 426 CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, knon, ktindex, dtime, & 427 lrestart_read, lrestart_write, lalo, & 428 contfrac, neighbours, resolution, date0, & 429 zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, & 430 cdrag, petA_orc, peqA_orc, petB_orc, peqB_orc, & 431 precip_rain, precip_snow, lwdown, swnet, swdown, ps, & 432 evap, fluxsens, fluxlat, coastalflow, riverflow, & 433 tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, & 434 lon_scat, lat_scat, q2m, t2m & 435 435 #ifdef ORCH_NEW 436 , nb_fields_cpl, fields_cpl)436 , nb_fields_cpl, fields_cpl) 437 437 #else 438 )438 ) 439 439 #endif 440 440 441 #else442 ! 
Interface for ORCHIDEE version 1.9 or later(1.9.2, 1.9.3, 1.9.4, 1.9.5) compiled in parallel mode(with preprocessing flag CPP_MPI)443 CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, offset, knon, ktindex, &444 orch_comm, dtime, lrestart_read, lrestart_write, lalo, &445 contfrac, neighbours, resolution, date0, &446 zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), &447 cdrag(1:knon), petA_orc(1:knon), peqA_orc(1:knon), petB_orc(1:knon), peqB_orc(1:knon), &448 precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown(1:knon), ps(1:knon), &449 evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), &450 tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), &451 lon_scat, lat_scat, q2m, t2m &441 ELSE 442 ! Interface for ORCHIDEE version 1.9 or later(1.9.2, 1.9.3, 1.9.4, 1.9.5) compiled in parallel mode(with preprocessing flag CPP_MPI) 443 CALL intersurf_main (itime+itau_phy-1, nbp_lon, nbp_lat, offset, knon, ktindex, & 444 orch_comm, dtime, lrestart_read, lrestart_write, lalo, & 445 contfrac, neighbours, resolution, date0, & 446 zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), & 447 cdrag(1:knon), petA_orc(1:knon), peqA_orc(1:knon), petB_orc(1:knon), peqB_orc(1:knon), & 448 precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown(1:knon), ps(1:knon), & 449 evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), & 450 tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), & 451 lon_scat, lat_scat, q2m, t2m & 452 452 #ifdef ORCH_NEW 453 , nb_fields_cpl, fields_cpl(1:knon,:))453 , nb_fields_cpl, fields_cpl(1:knon,:)) 454 454 #else 455 )455 ) 456 456 #endif 457 #endif 458 457 ENDIF 459 458 ENDIF 460 459 … … 467 466 468 467 IF (knon 
/=0) THEN 469 #ifndef CPP_MPI 470 ! Interface for ORCHIDEE compiled in sequential mode(without preprocessing flag CPP_MPI)471 CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat, knon, ktindex, dtime, &472 lrestart_read, lrestart_write, lalo, &473 contfrac, neighbours, resolution, date0, &474 zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, &475 cdrag, petA_orc, peqA_orc, petB_orc, peqB_orc, &476 precip_rain, precip_snow, lwdown, swnet, swdown_vrai, ps, &477 evap, fluxsens, fluxlat, coastalflow, riverflow, &478 tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, &479 lon_scat, lat_scat, q2m, t2m &468 IF (.NOT. using_mpi) THEN 469 ! Interface for ORCHIDEE compiled in sequential mode(without preprocessing flag CPP_MPI) 470 CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat, knon, ktindex, dtime, & 471 lrestart_read, lrestart_write, lalo, & 472 contfrac, neighbours, resolution, date0, & 473 zlev, u1_lay, v1_lay, spechum, temp_air, epot_air, ccanopy, & 474 cdrag, petA_orc, peqA_orc, petB_orc, peqB_orc, & 475 precip_rain, precip_snow, lwdown, swnet, swdown_vrai, ps, & 476 evap, fluxsens, fluxlat, coastalflow, riverflow, & 477 tsol_rad, tsurf_new, qsurf, albedo_out, emis_new, z0_new, & 478 lon_scat, lat_scat, q2m, t2m & 480 479 #ifdef ORCH_NEW 481 , nb_fields_cpl, fields_cpl)480 , nb_fields_cpl, fields_cpl) 482 481 #else 483 )482 ) 484 483 #endif 484 ELSE 485 ! 
Interface for ORCHIDEE version 1.9 or later compiled in parallel mode(with preprocessing flag CPP_MPI) 486 CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat,offset, knon, ktindex, & 487 orch_comm,dtime, lrestart_read, lrestart_write, lalo, & 488 contfrac, neighbours, resolution, date0, & 489 zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), & 490 cdrag(1:knon), petA_orc(1:knon), peqA_orc(1:knon), petB_orc(1:knon), peqB_orc(1:knon), & 491 precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown_vrai(1:knon), ps(1:knon), & 492 evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), & 493 tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), & 494 lon_scat, lat_scat, q2m, t2m & 495 #ifdef ORCH_NEW 496 , nb_fields_cpl, fields_cpl(1:knon,:)) 485 497 #else 486 ! Interface for ORCHIDEE version 1.9 or later compiled in parallel mode(with preprocessing flag CPP_MPI) 487 CALL intersurf_main (itime+itau_phy, nbp_lon, nbp_lat,offset, knon, ktindex, & 488 orch_comm,dtime, lrestart_read, lrestart_write, lalo, & 489 contfrac, neighbours, resolution, date0, & 490 zlev, u1_lay(1:knon), v1_lay(1:knon), spechum(1:knon), temp_air(1:knon), epot_air(1:knon), ccanopy(1:knon), & 491 cdrag(1:knon), petA_orc(1:knon), peqA_orc(1:knon), petB_orc(1:knon), peqB_orc(1:knon), & 492 precip_rain(1:knon), precip_snow(1:knon), lwdown(1:knon), swnet(1:knon), swdown_vrai(1:knon), ps(1:knon), & 493 evap(1:knon), fluxsens(1:knon), fluxlat(1:knon), coastalflow(1:knon), riverflow(1:knon), & 494 tsol_rad(1:knon), tsurf_new(1:knon), qsurf(1:knon), albedo_out(1:knon,:), emis_new(1:knon), z0_new(1:knon), & 495 lon_scat, lat_scat, q2m, t2m & 496 #ifdef ORCH_NEW 497 , nb_fields_cpl, fields_cpl(1:knon,:)) 498 #else 499 ) 498 ) 500 499 #endif 501 #endif 500 ENDIF 502 501 ENDIF 503 502 … … 545 544 546 545 USE mod_grid_phy_lmdz, ONLY: nbp_lon, 
nbp_lat 547 548 #ifdef CPP_MPI 549 INCLUDE 'mpif.h' 550 #endif 551 546 USE lmdz_mpi 552 547 553 548 ! Input arguments … … 564 559 ! Local varables 565 560 !**************************************************************************************** 566 #ifdef CPP_MPI567 561 INTEGER, DIMENSION(MPI_STATUS_SIZE) :: status 568 #endif569 562 570 563 INTEGER :: MyLastPoint … … 580 573 581 574 IF (is_parallel) THEN 582 #ifdef CPP_MPI583 575 CALL MPI_COMM_SIZE(orch_comm,mpi_size_orch,ierr) 584 576 CALL MPI_COMM_RANK(orch_comm,mpi_rank_orch,ierr) 585 #endif586 577 ELSE 587 578 mpi_rank_orch=0 … … 591 582 IF (is_parallel) THEN 592 583 IF (mpi_rank_orch /= 0) THEN 593 #ifdef CPP_MPI594 584 CALL MPI_RECV(LastPoint,1,MPI_INTEGER,mpi_rank_orch-1,1234,orch_comm,status,ierr) 595 #endif596 585 ENDIF 597 586 598 587 IF (mpi_rank_orch /= mpi_size_orch-1) THEN 599 #ifdef CPP_MPI600 588 CALL MPI_SEND(MyLastPoint,1,MPI_INTEGER,mpi_rank_orch+1,1234,orch_comm,ierr) 601 #endif602 589 ENDIF 603 590 ENDIF … … 617 604 ! 618 605 SUBROUTINE Get_orchidee_communicator(knon,orch_comm) 619 620 #ifdef CPP_MPI 621 INCLUDE 'mpif.h' 622 #endif 623 606 USE lmdz_mpi 624 607 625 608 INTEGER,INTENT(IN) :: knon … … 638 621 ENDIF 639 622 640 #ifdef CPP_MPI641 623 CALL MPI_COMM_SPLIT(COMM_LMDZ_PHY,color,mpi_rank,orch_comm,ierr) 642 #endif643 624 644 625 END SUBROUTINE Get_orchidee_communicator … … 650 631 USE indice_sol_mod 651 632 USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat 652 653 #ifdef CPP_MPI 654 INCLUDE 'mpif.h' 655 #endif 633 USE lmdz_mpi 656 634 657 635 ! 
Input arguments … … 688 666 knon_nb(:)=knon 689 667 ELSE 690 691 #ifdef CPP_MPI692 668 CALL MPI_GATHER(knon,1,MPI_INTEGER,knon_nb,1,MPI_INTEGER,0,COMM_LMDZ_PHY,ierr) 693 #endif694 695 669 ENDIF 696 670 … … 714 688 ktindex_g(:)=ktindex_p(:) 715 689 ELSE 716 717 #ifdef CPP_MPI718 690 CALL MPI_GATHERV(ktindex_p,knon,MPI_INTEGER,ktindex_g,knon_nb,& 719 691 displs,MPI_INTEGER,0,COMM_LMDZ_PHY,ierr) 720 #endif721 722 692 ENDIF 723 693 … … 776 746 neighbours(:,i)=neighbours_g(:,i) 777 747 ELSE 778 #ifdef CPP_MPI779 748 IF (knon > 0) THEN 780 749 ! knon>0, scattter global field neighbours_g from master process to local process … … 784 753 CALL MPI_SCATTERV(neighbours_g(:,i),knon_nb,displs,MPI_INTEGER,var_tmp,knon,MPI_INTEGER,0,COMM_LMDZ_PHY,ierr) 785 754 END IF 786 #endif787 755 ENDIF 788 756 ENDDO
Note: See TracChangeset for help on using the changeset viewer.