!
MODULE slab_heat_transp_mod
!
! Slab ocean : temperature tendencies due to horizontal diffusion
! and / or Ekman transport

  USE mod_grid_phy_lmdz, ONLY: nbp_lon, nbp_lat, klon_glo

  IMPLICIT NONE

! Variables copied over from dyn3d dynamics:
  REAL,SAVE,ALLOCATABLE :: fext(:)     ! Coriolis f times cell area
!$OMP THREADPRIVATE(fext)
  REAL,SAVE,ALLOCATABLE :: beta(:)     ! df/dy
!$OMP THREADPRIVATE(beta)
  REAL,SAVE,ALLOCATABLE :: unsairez(:) ! 1/cell area
!$OMP THREADPRIVATE(unsairez)
  REAL,SAVE,ALLOCATABLE :: unsaire(:)  ! 1/cell area
!$OMP THREADPRIVATE(unsaire)
  REAL,SAVE,ALLOCATABLE :: cu(:)       ! cell longitude dim (m)
!$OMP THREADPRIVATE(cu)
  REAL,SAVE,ALLOCATABLE :: cv(:)       ! cell latitude dim (m)
!$OMP THREADPRIVATE(cv)
  REAL,SAVE,ALLOCATABLE :: cuvsurcv(:) ! cu/cv (v points)
!$OMP THREADPRIVATE(cuvsurcv)
  REAL,SAVE,ALLOCATABLE :: cvusurcu(:) ! cv/cu (u points)
!$OMP THREADPRIVATE(cvusurcu)
  REAL,SAVE,ALLOCATABLE :: aire(:)     ! cell area
!$OMP THREADPRIVATE(aire)
  REAL,SAVE :: apoln                   ! area of north pole points
!$OMP THREADPRIVATE(apoln)
  REAL,SAVE :: apols                   ! area of south pole points
!$OMP THREADPRIVATE(apols)
  REAL,SAVE,ALLOCATABLE :: aireu(:)    ! area of u cells
!$OMP THREADPRIVATE(aireu)
  REAL,SAVE,ALLOCATABLE :: airev(:)    ! area of v cells
!$OMP THREADPRIVATE(airev)

! Local parameters for slab transport
!  LOGICAL,SAVE :: alpha_var ! variable coef for deep temp (1 layer)
!!$OMP THREADPRIVATE(alpha_var)
  LOGICAL,SAVE :: slab_upstream  ! upstream scheme ? (1 layer)
!$OMP THREADPRIVATE(slab_upstream)
  LOGICAL,SAVE :: slab_sverdrup  ! use wind stress curl at equator
!$OMP THREADPRIVATE(slab_sverdrup)
  LOGICAL,SAVE :: slab_tyeq      ! use merid wind stress at equator
!$OMP THREADPRIVATE(slab_tyeq)
  LOGICAL,SAVE :: ekman_zonadv   ! use zonal advection by Ekman currents
!$OMP THREADPRIVATE(ekman_zonadv)
  LOGICAL,SAVE :: ekman_zonavg   ! zonally averaged wind stress
!$OMP THREADPRIVATE(ekman_zonavg)
  REAL,SAVE :: alpham            ! alpha coefficient (deep temperature, 1.5-layer Ekman)
!$OMP THREADPRIVATE(alpham)
  REAL,SAVE :: gmkappa           ! GM scheme coefficient
!$OMP THREADPRIVATE(gmkappa)
  REAL,SAVE :: gm_smax           ! GM scheme limiter (max slope)
!$OMP THREADPRIVATE(gm_smax)

! geometry variables : f, beta, mask...
  REAL,SAVE,ALLOCATABLE :: zmasqu(:) ! continent mask for zonal mass flux
!$OMP THREADPRIVATE(zmasqu)
  REAL,SAVE,ALLOCATABLE :: zmasqv(:) ! continent mask for merid mass flux
!$OMP THREADPRIVATE(zmasqv)
  REAL,SAVE,ALLOCATABLE :: unsfv(:)  ! 1/f, v points
!$OMP THREADPRIVATE(unsfv)
  REAL,SAVE,ALLOCATABLE :: unsbv(:)  ! 1/beta
!$OMP THREADPRIVATE(unsbv)
  REAL,SAVE,ALLOCATABLE :: unsev(:)  ! 1/epsilon (drag), v points
!$OMP THREADPRIVATE(unsev)
  REAL,SAVE,ALLOCATABLE :: unsfu(:)  ! 1/f, u points
!$OMP THREADPRIVATE(unsfu)
  REAL,SAVE,ALLOCATABLE :: unseu(:)  ! 1/epsilon (drag), u points
!$OMP THREADPRIVATE(unseu)

! Routines from dyn3d, valid on global dynamics grid only:
  PRIVATE :: gr_fi_dyn, gr_dyn_fi ! to go between 1D and 2D horizontal grids
  PRIVATE :: gr_scal_v,gr_v_scal,gr_scal_u ! interpolate between scalar (T), u and v points on the 2D grid
  PRIVATE :: grad,diverg

CONTAINS
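
! Typical initialisation sequence (sketch only; the actual call sites are in
! the dynamics/physics interface, outside this module, and the geometry
! arguments come from the dynamical core grid):
!   CALL ini_slab_transp_geom(ip1jm,ip1jmp1,unsairez,fext,unsaire, &
!                             cu,cuvsurcv,cv,cvusurcu,aire,apoln,apols, &
!                             aireu,airev,rlatv)
!   CALL ini_slab_transp(zmasq)
! ini_slab_transp_geom must be called first: it copies the grid geometry and
! temporarily stores rlatv in beta, which ini_slab_transp then uses to finish
! the setup.
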
  SUBROUTINE ini_slab_transp_geom(ip1jm,ip1jmp1,unsairez_,fext_,unsaire_,&
                                  cu_,cuvsurcv_,cv_,cvusurcu_, &
                                  aire_,apoln_,apols_, &
                                  aireu_,airev_,rlatv)
!    USE comcstfi_mod, ONLY: omeg, rad
    ! number of points in lon, lat
    IMPLICIT NONE
    ! Routine copies some geometry variables from the dynamical core
    ! (see global vars above for their meaning)
    INTEGER,INTENT(IN) :: ip1jm
    INTEGER,INTENT(IN) :: ip1jmp1
    REAL,INTENT(IN) :: unsairez_(ip1jm)
    REAL,INTENT(IN) :: fext_(ip1jm)
    REAL,INTENT(IN) :: unsaire_(ip1jmp1)
    REAL,INTENT(IN) :: cu_(ip1jmp1)
    REAL,INTENT(IN) :: cuvsurcv_(ip1jm)
    REAL,INTENT(IN) :: cv_(ip1jm)
    REAL,INTENT(IN) :: cvusurcu_(ip1jmp1)
    REAL,INTENT(IN) :: aire_(ip1jmp1)
    REAL,INTENT(IN) :: apoln_
    REAL,INTENT(IN) :: apols_
    REAL,INTENT(IN) :: aireu_(ip1jmp1)
    REAL,INTENT(IN) :: airev_(ip1jm)
    REAL,INTENT(IN) :: rlatv(nbp_lat-1)

    ! Sanity check on dimensions
    if ((ip1jm.ne.((nbp_lon+1)*(nbp_lat-1))).or. &
        (ip1jmp1.ne.((nbp_lon+1)*nbp_lat))) then
      write(*,*) "ini_slab_transp_geom Error: wrong array sizes"
      stop
    endif

    ! Allocations could be done only on master process/thread...
    allocate(unsairez(ip1jm))
    unsairez(:)=unsairez_(:)
    allocate(fext(ip1jm))
    fext(:)=fext_(:)
    allocate(unsaire(ip1jmp1))
    unsaire(:)=unsaire_(:)
    allocate(cu(ip1jmp1))
    cu(:)=cu_(:)
    allocate(cuvsurcv(ip1jm))
    cuvsurcv(:)=cuvsurcv_(:)
    allocate(cv(ip1jm))
    cv(:)=cv_(:)
    allocate(cvusurcu(ip1jmp1))
    cvusurcu(:)=cvusurcu_(:)
    allocate(aire(ip1jmp1))
    aire(:)=aire_(:)
    apoln=apoln_
    apols=apols_
    allocate(aireu(ip1jmp1))
    aireu(:)=aireu_(:)
    allocate(airev(ip1jm))
    airev(:)=airev_(:)
    allocate(beta(nbp_lat-1))
!    beta(:)=2*omeg*cos(rlatv(:))/rad
    beta(:)=rlatv(:) ! Temporarily storing rlatv in beta, will be updated in ini_slab_transp

  END SUBROUTINE ini_slab_transp_geom

  SUBROUTINE ini_slab_transp(zmasq)
!    USE ioipsl_getin_p_mod, only: getin_p
    USE IOIPSL, ONLY : getin
    USE comcstfi_mod, ONLY: omeg, rad
    IMPLICIT NONE

    REAL, INTENT (IN) :: zmasq(klon_glo) ! ocean / continent mask, 1=continent
    REAL zmasq_2d((nbp_lon+1)*nbp_lat)
    REAL ff((nbp_lon+1)*(nbp_lat-1)) ! Coriolis parameter
    REAL eps ! epsilon friction (drag) parameter (s-1)
!    INTEGER :: slab_ekman
    LOGICAL :: slab_ekman
    INTEGER i
    INTEGER :: iim,iip1,jjp1,ip1jm,ip1jmp1

    ! Some definitions for the grid size
    iim=nbp_lon
    iip1=nbp_lon+1
    jjp1=nbp_lat
    ip1jm=(nbp_lon+1)*(nbp_lat-1)
    ip1jmp1=(nbp_lon+1)*nbp_lat

    ! Options for heat transport
    ! Alpha variable? (for 1.5-layer Ekman)
!    alpha_var=.FALSE.
!    CALL getin('slab_alphav',alpha_var)
!    print *,'alpha variable',alpha_var
    ! centered or upstream scheme for meridional transport
    slab_upstream=.FALSE.
    CALL getin('slab_upstream',slab_upstream)
    print *,'upstream slab scheme',slab_upstream
    ! Sverdrup balance (wind stress curl) at equator ?
    slab_sverdrup=.TRUE.
    CALL getin('slab_sverdrup',slab_sverdrup)
    print *,'Sverdrup balance',slab_sverdrup
    ! Use tauy for meridional flux at equator ?
    slab_tyeq=.TRUE.
    CALL getin('slab_tyeq',slab_tyeq)
    print *,'Tauy forcing at equator',slab_tyeq
    ! Zonal Ekman transport ? (or meridional only)
    ekman_zonadv=.TRUE.
    CALL getin('slab_ekman_zonadv',ekman_zonadv)
    print *,'Use Ekman flow in zonal direction',ekman_zonadv
    ! Zonally average the Ekman flux ?
    ekman_zonavg=.FALSE.
    CALL getin('slab_ekman_zonavg',ekman_zonavg)
    print *,'Use zonally-averaged wind stress ?',ekman_zonavg
    ! Value of alpha (Ekman 1.5-layer Tdeep computation)
    alpham=2./3.
    CALL getin('slab_alpha',alpham)
    print *,'slab_alpha',alpham
    ! GM kappa coefficient (m2/s) for the 2-layer slab
    gmkappa=1000.
    CALL getin('slab_gmkappa',gmkappa)
    print *,'slab_gmkappa',gmkappa
    ! GM max slope for the 2-layer slab
    gm_smax=2e-3
    CALL getin('slab_gm_smax',gm_smax)
    print *,'slab_gm_smax',gm_smax
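
    ! Note on the Ekman mass fluxes set up below (sketch only; rho0 denotes a
    ! reference sea-water density, a notation used in this note and not in the
    ! code). With a linear drag eps regularising the balance where f vanishes,
    ! the surface-layer transport (Ue,Ve) obeys
    !    eps*Ue - f*Ve = taux/rho0 ,   f*Ue + eps*Ve = tauy/rho0
    ! which away from the equator (|f| >> eps) reduces to the classical Ekman
    ! relations Ue = tauy/(rho0*f), Ve = -taux/(rho0*f). When slab_sverdrup is
    ! on, the meridional flux near the equator follows the Sverdrup balance
    ! beta*Ve = curl(tau)/rho0 instead, hence the 1/beta coefficient (unsbv).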

    ! -----------------------------------------------------------
    ! Define ocean / continent mask (no flux into continent cells)
    allocate(zmasqu(ip1jmp1))
    allocate(zmasqv(ip1jm))
    zmasqu=1.
    zmasqv=1.
    ! convert mask to 2D grid
    CALL gr_fi_dyn(1,iip1,jjp1,zmasq,zmasq_2d)
    ! set flux mask to 0 at the boundaries of continent cells
    DO i=1,ip1jmp1-1
      IF (zmasq_2d(i).GT.1e-5 .OR. zmasq_2d(i+1).GT.1e-5) THEN
        zmasqu(i)=0.
      ENDIF
    END DO
    ! longitude periodicity: the duplicated last column copies the first one
    DO i=iip1,ip1jmp1,iip1
      zmasqu(i)=zmasqu(i-iim)
    END DO
    DO i=1,ip1jm
      IF (zmasq_2d(i).GT.1e-5 .OR. zmasq_2d(i+iip1).GT.1e-5) THEN
        zmasqv(i)=0.
      END IF
    END DO

    ! -----------------------------------------------------------
    ! Coriolis and friction for Ekman transport
!    slab_ekman=2
    slab_ekman=.TRUE.
    CALL getin("slab_ekman",slab_ekman)
!    IF (slab_ekman.GT.0) THEN
    IF (slab_ekman) THEN
      allocate(unsfv(ip1jm))
      allocate(unsev(ip1jm))
      allocate(unsfu(ip1jmp1))
      allocate(unseu(ip1jmp1))
      allocate(unsbv(ip1jm))
      ! Drag parameter (in s-1)
      eps=1e-5
      CALL getin('slab_eps',eps)
      print *,'epsilon=',eps
      ! Coriolis parameter
      ff=fext*unsairez
      ! coefs to convert tau_x, tau_y to Ekman mass fluxes
      ! on 2D grid v points
      IF (slab_sverdrup) THEN
        ! unsev factor [0 1] for transition Ekman / Sverdrup near the equator (f<