source: LMDZ4/branches/LMDZ4-dev-20091210/libf/dyn3dpar/fluxstokenc_p.F @ 1413

Last change on this file since 1413 was 1222, checked in by Ehouarn Millour, 15 years ago

Changes and cleanups to enable compiling without physics
and without ioipsl.

IOIPSL related cleanups:

  • bibio/writehist.F: encapsulated the routine (which needs IOIPSL to function) with an #ifdef IOIPSL flag.
  • dyn3d/abort_gcm.F, dyn3dpar/abort_gcm.F and dyn3dpar/getparam.F90: use the ioipsl_getincom module when not compiling with the IOIPSL library, so that the getin() routine is always available (see the sketch after this list).
  • removed unused "use IOIPSL" in dyn3dpar/guide_p_mod.F90
  • calendar-related issue: initialize day_ref and annee_ref in iniacademic.F (i.e. when they are not read from the start.nc file)
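
A minimal sketch of the getin() fallback described above (the cpp flag is
assumed here to be CPP_IOIPSL; the exact name may differ):

#ifdef CPP_IOIPSL
      USE IOIPSL            ! getin() provided by the IOIPSL library
#else
      USE ioipsl_getincom   ! standalone fallback so getin() stays available
#endif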

Earth-specific programs/routines/modules:
create_etat0.F, fluxstokenc.F, limit_netcdf.F, startvar.F
(versions in dyn3d and dyn3dpar)
These routines and modules, which by design and purpose only work with the
Earth physics, are encapsulated with the CPP_EARTH cpp flag (see the skeleton below).
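
The skeleton of this encapsulation, as it appears in the fluxstokenc_p.F
listing further down this page, reduces to:

      SUBROUTINE fluxstokenc_p(pbaru,pbarv,masse,teta,phi,phis,
     . time_step,itau )
#ifdef CPP_EARTH
c     ... Earth- and IOIPSL-dependent body (see the full listing below) ...
#else
      write(lunout,*)
     & 'fluxstokenc: Needs Earth physics (and ioipsl) to function'
#endif
      RETURN
      END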

Earth-specific instructions:

  • calls to qminimum (specific treatment of the first 2 tracers, i.e. water) in dyn3d/caladvtrac.F, dyn3d/integrd.F, dyn3dpar/caladvtrac_p.F and dyn3dpar/integrd_p.F are made only if (planet_type == 'earth'), as sketched below.
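
A minimal sketch of that guard (the argument list of qminimum shown here is
indicative, not necessarily the exact one used in those routines):

      IF (planet_type == 'earth') THEN
c        water (the first 2 tracers) gets its specific correction only for Earth
         CALL qminimum( q, nqtot, deltap )
      ENDIF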

Interaction with parallel physics:

  • routine dyn3dpar/parallel.F90 uses the "surface_data" module (which is in the physics) to obtain the value of "type_ocean". This is encapsulated with #ifdef CPP_EARTH, and type_ocean is set to a default of "dummy" otherwise (see the sketch after this list).
  • So far, only the Earth physics have been parallelized, so all interactions between the parallel dynamics and the parallel physics are encapsulated with #ifdef CPP_EARTH (this way the model can run in parallel without any physics). The (dyn3dpar) routines that contain such interactions are: bands.F90, gr_dyn_fi_p.F, gr_fi_dyn_p.F and mod_interface_dyn_phys.F90. This should later (when the dyn/phys interface is improved) be encapsulated with a more general and appropriate CPP_PHYS cpp flag.
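
A minimal sketch of the type_ocean fallback in parallel.F90 (free-form source;
the actual declaration may differ):

#ifdef CPP_EARTH
  USE surface_data, ONLY: type_ocean      ! provided by the Earth physics
#else
  CHARACTER(len=6), PARAMETER :: type_ocean = "dummy"
#endif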

I checked that these changes do not alter results (on the simple
32x24x11 bench) on Ciclad (seq & mpi), Brodie (seq, mpi & omp) and
Vargas (seq, mpi & omp).

EM

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 5.7 KB
!
! $Id: fluxstokenc_p.F 1222 2009-08-07 11:48:33Z idelkadi $
!
      SUBROUTINE fluxstokenc_p(pbaru,pbarv,masse,teta,phi,phis,
     . time_step,itau )
#ifdef CPP_EARTH
! This routine is designed to work for Earth and with ioipsl

       USE IOIPSL
       USE parallel
       USE misc_mod
       USE mod_hallo
c
c     Author:  F. Hourdin
c
c
ccc   ..   Modified by P. Le Van  ( 20/12/97 )  ...
c
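c
c   Summary: at every call, accumulate the horizontal mass fluxes
c   (pbaru,pbarv) together with teta and phi; every istdyn time steps,
c   average them, derive the vertical flux and regroup the polar cells
c   (groupe_p), and write the result to the "fluxstoke" history files.
c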
      IMPLICIT NONE
c
#include "dimensions.h"
#include "paramet.h"
#include "comconst.h"
#include "comvert.h"
#include "comgeom.h"
#include "tracstoke.h"
#include "temps.h"
#include "iniprint.h"

      REAL time_step,t_wrt, t_ops
      REAL pbaru(ip1jmp1,llm),pbarv(ip1jm,llm)
      REAL masse(ip1jmp1,llm),teta(ip1jmp1,llm),phi(ip1jmp1,llm)
      REAL phis(ip1jmp1)

      REAL,SAVE :: pbaruc(ip1jmp1,llm),pbarvc(ip1jm,llm)
      REAL massem(ip1jmp1,llm),tetac(ip1jmp1,llm),phic(ip1jmp1,llm)

      REAL pbarug(ip1jmp1,llm),pbarvg(iip1,jjm,llm),wg(ip1jmp1,llm)

      REAL pbarvst(iip1,jjp1,llm),zistdyn
        real dtcum

      INTEGER iadvtr,ndex(1)
      integer nscal
      real tst(1),ist(1),istp(1)
      INTEGER ij,l,irec,i,j,itau
      INTEGER,SAVE :: fluxid, fluxvid,fluxdid

      SAVE iadvtr, massem,irec
      SAVE phic,tetac
      logical first
      save first
      data first/.true./
      DATA iadvtr/0/
      integer :: ijb,ije,jjb,jje,jjn
      type(Request) :: Req

c AC initialisations
cym      pbarug(:,:)   = 0.
cym      pbarvg(:,:,:) = 0.
cym      wg(:,:)       = 0.

c$OMP MASTER

      if(first) then

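c       first call: create the "fluxstoke" history files, then write the
c       time-independent fields (phis, aire) and the run constants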
        CALL initfluxsto_p( 'fluxstoke',
     .  time_step,istdyn* time_step,istdyn* time_step,
     .  fluxid,fluxvid,fluxdid)

        ijb=ij_begin
        ije=ij_end
        jjn=jj_nb

        ndex(1) = 0
        call histwrite(fluxid, 'phis', 1, phis(ijb:ije),
     .                 iip1*jjn, ndex)
        call histwrite(fluxid, 'aire', 1, aire(ijb:ije),
     .                 iip1*jjn, ndex)

        ndex(1) = 0
        nscal = 1

        if (mpi_rank==0) then
          tst(1) = time_step
          call histwrite(fluxdid, 'dtvr', 1, tst, nscal, ndex)
          ist(1)=istdyn
          call histwrite(fluxdid, 'istdyn', 1, ist, nscal, ndex)
          istp(1)= istphy
          call histwrite(fluxdid, 'istphy', 1, istp, nscal, ndex)
        endif
        first = .false.

      endif


      IF(iadvtr.EQ.0) THEN
cym         CALL initial0(ijp1llm,phic)
cym        CALL initial0(ijp1llm,tetac)
cym         CALL initial0(ijp1llm,pbaruc)
cym         CALL initial0(ijmllm,pbarvc)
        ijb=ij_begin
        ije=ij_end
        phic(ijb:ije,1:llm)=0
        tetac(ijb:ije,1:llm)=0
        pbaruc(ijb:ije,1:llm)=0

        IF (pole_sud) ije=ij_end-iip1
        pbarvc(ijb:ije,1:llm)=0
      ENDIF

c   accumulation of the horizontal mass fluxes
      ijb=ij_begin
      ije=ij_end

      DO l=1,llm
         DO ij = ijb,ije
            pbaruc(ij,l) = pbaruc(ij,l) + pbaru(ij,l)
            tetac(ij,l) = tetac(ij,l) + teta(ij,l)
            phic(ij,l) = phic(ij,l) + phi(ij,l)
         ENDDO
       ENDDO

      ijb=ij_begin
      ije=ij_end
      if (pole_sud) ije=ij_end-iip1

      DO l=1,llm
         DO ij = ijb,ije
            pbarvc(ij,l) = pbarvc(ij,l) + pbarv(ij,l)
         ENDDO
      ENDDO

c   select the instantaneous mass of the grid cells before transport.
      IF(iadvtr.EQ.0) THEN
cym         CALL SCOPY(ip1jmp1*llm,masse,1,massem,1)
        ijb=ij_begin
        ije=ij_end
        massem(ijb:ije,1:llm)=masse(ijb:ije,1:llm)
      ENDIF

      iadvtr   = iadvtr+1

c$OMP END MASTER
c$OMP BARRIER
c   test whether advection is performed at this time step
      IF ( iadvtr.EQ.istdyn ) THEN
c$OMP MASTER
c    normalization
      ijb=ij_begin
      ije=ij_end

      DO l=1,llm
         DO ij = ijb,ije
            pbaruc(ij,l) = pbaruc(ij,l)/float(istdyn)
            tetac(ij,l) = tetac(ij,l)/float(istdyn)
            phic(ij,l) = phic(ij,l)/float(istdyn)
         ENDDO
      ENDDO

      ijb=ij_begin
      ije=ij_end
      if (pole_sud) ije=ij_end-iip1

      DO l=1,llm
          DO ij = ijb,ije
            pbarvc(ij,l) = pbarvc(ij,l)/float(istdyn)
         ENDDO
      ENDDO

c   processing of the mass fluxes before advection:
c     1. compute w
c     2. group the grid cells near the pole
c$OMP END MASTER
c$OMP BARRIER
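c   exchange the halo rows of the accumulated fluxes between MPI
c   processes before the polar regrouping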
        call Register_Hallo(pbaruc,ip1jmp1,llm,1,1,1,1,Req)
        call Register_Hallo(pbarvc,ip1jm,llm,1,1,1,1,Req)
        call SendRequest(Req)
c$OMP BARRIER
        call WaitRequest(Req)
c$OMP BARRIER
c$OMP MASTER
        CALL groupe_p( massem, pbaruc,pbarvc, pbarug,pbarvg,wg )

        jjb=jj_begin
        jje=jj_end
        if (pole_sud) jje=jj_end-1

        do l=1,llm
           do j=jjb,jje
              do i=1,iip1
                 pbarvst(i,j,l)=pbarvg(i,j,l)
              enddo
           enddo
         enddo

         if (pole_sud) then
c          set the southernmost row of pbarvst to zero on every level
           do l=1,llm
             do i=1,iip1
                pbarvst(i,jjp1,l)=0.
             enddo
           enddo
        endif

         iadvtr=0
        Print*,'ITAU at which the mass fluxes are stored',itau

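c   write the stored mass and the averaged fields to the history files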
        ijb=ij_begin
        ije=ij_end
        jjn=jj_nb

        call histwrite(fluxid, 'masse', itau, massem(ijb:ije,:),
     .               iip1*jjn*llm, ndex)

        call histwrite(fluxid, 'pbaru', itau, pbarug(ijb:ije,:),
     .               iip1*jjn*llm, ndex)

        jjb=jj_begin
        jje=jj_end
        jjn=jj_nb
        if (pole_sud) then
          jje=jj_end-1
          jjn=jj_nb-1
        endif

        call histwrite(fluxvid, 'pbarv', itau, pbarvg(:,jjb:jje,:),
     .               iip1*jjn*llm, ndex)

        ijb=ij_begin
        ije=ij_end
        jjn=jj_nb

        call histwrite(fluxid, 'w' ,itau, wg(ijb:ije,:),
     .             iip1*jjn*llm, ndex)

        call histwrite(fluxid, 'teta' ,itau, tetac(ijb:ije,:),
     .             iip1*jjn*llm, ndex)

        call histwrite(fluxid, 'phi' ,itau, phic(ijb:ije,:),
     .             iip1*jjn*llm, ndex)

C
c$OMP END MASTER
      ENDIF ! if iadvtr.EQ.istdyn

#else
      write(lunout,*)
     & 'fluxstokenc: Needs Earth physics (and ioipsl) to function'
#endif
! of #ifdef CPP_EARTH
      RETURN
      END