!WRF:MEDIATION_LAYER:PHYSICS
!

MODULE module_diagnostics
CONTAINS

   SUBROUTINE diagnostic_output_calc(                                &
                      ids,ide, jds,jde, kds,kde,                     &
                      ims,ime, jms,jme, kms,kme,                     &
                      ips,ipe, jps,jpe, kps,kpe,                     & ! patch dims
                      i_start,i_end,j_start,j_end,kts,kte,num_tiles  &
                     ,dpsdt,dmudt                                    &
                     ,p8w,pk1m,mu_2,mu_2m                            &
                     ,u,v                                            &
                     ,raincv,rainncv,rainc,rainnc                    &
                     ,hfx,sfcevp,lh                                  &
                     ,dt,xtime,sbw                                   &
                     ,diag_print                                     &
                                                                     )
!----------------------------------------------------------------------

   USE module_dm, ONLY: wrf_dm_sum_real, wrf_dm_maxval

   IMPLICIT NONE

!======================================================================
! Definitions
!-----------
!-- DIAG_PRINT  print control: 0 - no diagnostics; 1 - dmudt only; 2 - all
!-- DT          time step (seconds)
!-- XTIME       forecast time (minutes)
!-- SBW         specified boundary width - to be used later
!
!-- P8W         3D pressure array at full eta levels (Pa)
!-- MU          dry column hydrostatic pressure (Pa)
!-- RAINC       cumulus scheme precipitation since hour 0 (mm)
!-- RAINCV      cumulus scheme precipitation in one time step (mm)
!-- RAINNC      explicit scheme precipitation since hour 0 (mm)
!-- RAINNCV     explicit scheme precipitation in one time step (mm)
!-- HFX         surface sensible heat flux
!-- LH          surface latent heat flux
!-- SFCEVP      total surface evaporation
!-- U           u component of wind - to be used later to compute k.e.
!-- V           v component of wind - to be used later to compute k.e.
!
!-- ids         start index for i in domain
!-- ide         end index for i in domain
!-- jds         start index for j in domain
!-- jde         end index for j in domain
!-- kds         start index for k in domain
!-- kde         end index for k in domain
!-- ims         start index for i in memory
!-- ime         end index for i in memory
!-- jms         start index for j in memory
!-- jme         end index for j in memory
!-- kms         start index for k in memory
!-- kme         end index for k in memory
!-- ips         start index for i in patch
!-- ipe         end index for i in patch
!-- jps         start index for j in patch
!-- jpe         end index for j in patch
!-- i_start     start indices for i in tile
!-- i_end       end indices for i in tile
!-- j_start     start indices for j in tile
!-- j_end       end indices for j in tile
!-- kts         start index for k in tile
!-- kte         end index for k in tile
!-- num_tiles   number of tiles
!
!======================================================================

   INTEGER, INTENT(IN) ::                                            &
                             ids,ide, jds,jde, kds,kde,              &
                             ims,ime, jms,jme, kms,kme,              &
                             ips,ipe, jps,jpe, kps,kpe,              &
                                               kts,kte,              &
                                               num_tiles

   INTEGER, DIMENSION(num_tiles), INTENT(IN) ::                      &
                             i_start,i_end,j_start,j_end

   INTEGER, INTENT(IN) :: diag_print

   REAL, DIMENSION( ims:ime, kms:kme, jms:jme ), INTENT(IN) ::       &
                                                              u      &
                                                    ,         v      &
                                                    ,       p8w

   REAL, DIMENSION( ims:ime, jms:jme ), INTENT(IN) ::                &
                                                           MU_2      &
                                                    ,     RAINC      &
                                                    ,    RAINNC      &
                                                    ,    RAINCV      &
                                                    ,   RAINNCV      &
                                                    ,       HFX      &
                                                    ,    SFCEVP      &
                                                    ,        LH

   REAL, DIMENSION( ims:ime, jms:jme ), INTENT(INOUT) ::             &
                                                          DPSDT      &
                                                    ,     DMUDT      &
                                                    ,     MU_2M      &
                                                    ,      PK1M

   REAL,    INTENT(IN) :: DT, XTIME
   INTEGER, INTENT(IN) :: SBW

! LOCAL VAR

   INTEGER :: i,j,k,its,ite,jts,jte,ij
   INTEGER :: idp,jdp,irc,jrc,irnc,jrnc,isnh,jsnh
   INTEGER :: prfreq

   REAL :: no_points
   REAL :: dpsdt_sum, dmudt_sum, dardt_sum, drcdt_sum, drndt_sum
   REAL :: hfx_sum, lh_sum, sfcevp_sum, rainc_sum, rainnc_sum, raint_sum
   REAL :: dmumax, raincmax, rainncmax, snowhmax
   LOGICAL, EXTERNAL :: wrf_dm_on_monitor

   CHARACTER*256 :: outstring
   CHARACTER*6   :: grid_str

!-----------------------------------------------------------------

   IF ( diag_print .EQ. 0 ) RETURN

   IF ( xtime .NE. 0. ) THEN

   IF ( diag_print .EQ. 1 ) THEN
      prfreq = dt                     ! in s: prints every time step
!     prfreq = max(2,int(dt/60.))     ! in min
   ELSE
      prfreq = 10                     ! in min
   ENDIF

   IF ( MOD(nint(dt),prfreq) == 0 ) THEN
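! The tendencies below are one-step backward differences:
!   dpsdt(i,j) = ( p8w(i,kms,j) - pk1m(i,j) ) / dt    surface pressure tendency (Pa/s)
!   dmudt(i,j) = ( mu_2(i,j)   - mu_2m(i,j) ) / dt    dry column mass tendency (Pa/s)
! pk1m and mu_2m hold the previous step's values; they are refreshed at
! the end of this routine.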
! COMPUTE THE NUMBER OF MASS GRID POINTS
   no_points = float((ide-ids)*(jde-jds))

! SET START AND END POINTS FOR TILES
! OpenMP left disabled here: the dmumax/idp/jdp max search below would
! need a reduction to be thread-safe.
!  !$OMP PARALLEL DO &
!  !$OMP PRIVATE ( ij )

   dmumax = 0.
   idp = ips      ! default max location in case dmudt is zero everywhere
   jdp = jps

   DO ij = 1 , num_tiles
!     print *, i_start(ij),i_end(ij),j_start(ij),j_end(ij)
      DO j=j_start(ij),j_end(ij)
      DO i=i_start(ij),i_end(ij)
         dpsdt(i,j)=(p8w(i,kms,j)-pk1m(i,j))/dt
         dmudt(i,j)=(mu_2(i,j)-mu_2m(i,j))/dt
         IF ( abs(dmudt(i,j)*dt) .GT. dmumax ) THEN
            dmumax=abs(dmudt(i,j)*dt)
            idp=i
            jdp=j
         ENDIF
      ENDDO
      ENDDO
   ENDDO
!  !$OMP END PARALLEL DO

! convert DMUMAX from (Pa) to (bars) per time step
   dmumax = dmumax*1.e-5
! compute global MAX
   CALL wrf_dm_maxval ( dmumax, idp, jdp )

!  print *, 'p8w(30,1,30),pk1m(30,30) : ', p8w(30,1,30),pk1m(30,30)
!  print *, 'mu_2(30,30),mu_2m(30,30) : ', mu_2(30,30),mu_2m(30,30)

! sum |tendencies| over this patch (mass points only, hence ide-1, jde-1)
   dpsdt_sum = 0.
   dmudt_sum = 0.

   DO j = jps, min(jpe,jde-1)
   DO i = ips, min(ipe,ide-1)
      dpsdt_sum = dpsdt_sum + abs(dpsdt(i,j))
      dmudt_sum = dmudt_sum + abs(dmudt(i,j))
   ENDDO
   ENDDO

! compute global sum
   dpsdt_sum = wrf_dm_sum_real ( dpsdt_sum )
   dmudt_sum = wrf_dm_sum_real ( dmudt_sum )

!  print *, 'dpsdt, dmudt : ', dpsdt_sum, dmudt_sum

   IF ( diag_print .EQ. 2 ) THEN
      dardt_sum  = 0.
      drcdt_sum  = 0.
      drndt_sum  = 0.
      rainc_sum  = 0.
      raint_sum  = 0.
      rainnc_sum = 0.
      sfcevp_sum = 0.
      hfx_sum    = 0.
      lh_sum     = 0.
! initialize the max searches (previously left unset before first use)
      raincmax  = 0.
      rainncmax = 0.
      irc  = ips ; jrc  = jps
      irnc = ips ; jrnc = jps

      DO j = jps, min(jpe,jde-1)
      DO i = ips, min(ipe,ide-1)
         drcdt_sum = drcdt_sum + abs(raincv(i,j))
         drndt_sum = drndt_sum + abs(rainncv(i,j))
         dardt_sum = dardt_sum + abs(raincv(i,j)) + abs(rainncv(i,j))
         rainc_sum = rainc_sum + abs(rainc(i,j))
! MAX for accumulated conv precip
         IF ( rainc(i,j) .GT. raincmax ) THEN
            raincmax=rainc(i,j)
            irc=i
            jrc=j
         ENDIF
         rainnc_sum = rainnc_sum + abs(rainnc(i,j))
! MAX for accumulated resolved precip
         IF ( rainnc(i,j) .GT. rainncmax ) THEN
            rainncmax=rainnc(i,j)
            irnc=i
            jrnc=j
         ENDIF
         raint_sum  = raint_sum + abs(rainc(i,j)) + abs(rainnc(i,j))
         sfcevp_sum = sfcevp_sum + abs(sfcevp(i,j))
         hfx_sum    = hfx_sum + abs(hfx(i,j))
         lh_sum     = lh_sum + abs(lh(i,j))
      ENDDO
      ENDDO

! compute global MAX
      CALL wrf_dm_maxval ( raincmax,  irc,  jrc  )
      CALL wrf_dm_maxval ( rainncmax, irnc, jrnc )

! compute global sum
      drcdt_sum  = wrf_dm_sum_real ( drcdt_sum )
      drndt_sum  = wrf_dm_sum_real ( drndt_sum )
      dardt_sum  = wrf_dm_sum_real ( dardt_sum )
      rainc_sum  = wrf_dm_sum_real ( rainc_sum )
      rainnc_sum = wrf_dm_sum_real ( rainnc_sum )
      raint_sum  = wrf_dm_sum_real ( raint_sum )
      sfcevp_sum = wrf_dm_sum_real ( sfcevp_sum )
      hfx_sum    = wrf_dm_sum_real ( hfx_sum )
      lh_sum     = wrf_dm_sum_real ( lh_sum )
   ENDIF

! print out the average values

   CALL get_current_grid_name( grid_str )

#ifdef DM_PARALLEL
   IF ( wrf_dm_on_monitor() ) THEN
#endif
! 108. converts a mean |tendency| in Pa/s to mb per 3 h
! (0.01 mb/Pa * 10800 s = 108.)
      WRITE(outstring,*) grid_str,'Domain average of dpsdt, dmudt (mb/3h): ', xtime, &
            dpsdt_sum/no_points*108., &
            dmudt_sum/no_points*108.
      CALL wrf_message ( TRIM(outstring) )
      WRITE(outstring,*) grid_str,'Max mu change time step: ', idp,jdp,dmumax
      CALL wrf_message ( TRIM(outstring) )
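! With diag_print = 2, precipitation and surface-flux diagnostics are
! also reported: one-step rates divided by dt give mm/s, accumulated
! sums are domain means in mm, and the max-accumulation lines include
! the (I,J) location found by wrf_dm_maxval.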
      IF ( diag_print .EQ. 2 ) THEN
         WRITE(outstring,*) grid_str,'Domain average of dardt, drcdt, drndt (mm/sec): ', xtime, &
               dardt_sum/dt/no_points, &
               drcdt_sum/dt/no_points, &
               drndt_sum/dt/no_points
         CALL wrf_message ( TRIM(outstring) )
         WRITE(outstring,*) grid_str,'Domain average of rt_sum, rc_sum, rnc_sum (mm): ', xtime, &
               raint_sum/no_points, &
               rainc_sum/no_points, &
               rainnc_sum/no_points
         CALL wrf_message ( TRIM(outstring) )
         WRITE(outstring,*) grid_str,'Max Accum Resolved Precip, I,J (mm): ', &
               rainncmax,irnc,jrnc
         CALL wrf_message ( TRIM(outstring) )
         WRITE(outstring,*) grid_str,'Max Accum Convective Precip, I,J (mm): ', &
               raincmax,irc,jrc
         CALL wrf_message ( TRIM(outstring) )
         WRITE(outstring,*) grid_str,'Domain average of sfcevp, hfx, lh: ', xtime, &
               sfcevp_sum/no_points, &
               hfx_sum/no_points, &
               lh_sum/no_points
         CALL wrf_message ( TRIM(outstring) )
      ENDIF
#ifdef DM_PARALLEL
   ENDIF
#endif

   ENDIF   ! print frequency
   ENDIF   ! xtime .ne. 0.

! save values at this time step for the next step's tendencies
!$OMP PARALLEL DO &
!$OMP PRIVATE ( ij,i,j )
   DO ij = 1 , num_tiles
      DO j=j_start(ij),j_end(ij)
      DO i=i_start(ij),i_end(ij)
         pk1m(i,j)=p8w(i,kms,j)
         mu_2m(i,j)=mu_2(i,j)
      ENDDO
      ENDDO

! zero the tendencies at the initial time, when no previous state exists
      IF ( xtime .LT. 0.0001 ) THEN
         DO j=j_start(ij),j_end(ij)
         DO i=i_start(ij),i_end(ij)
            dpsdt(i,j)=0.
            dmudt(i,j)=0.
         ENDDO
         ENDDO
      ENDIF
   ENDDO
!$OMP END PARALLEL DO

   END SUBROUTINE diagnostic_output_calc

END MODULE module_diagnostics
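! ----------------------------------------------------------------------
! Editorial sketch (not part of WRF): a minimal standalone check of the
! unit conversion applied in diagnostic_output_calc, where a domain-mean
! |tendency| in Pa/s is multiplied by 108. to report mb per 3 h
! (1 Pa = 0.01 mb and 3 h = 10800 s, so 0.01 * 10800 = 108.). Shown here
! for reference; it would live in its own file and compile on its own.
! ----------------------------------------------------------------------
! PROGRAM check_dpsdt_units
!    IMPLICIT NONE
!    REAL, PARAMETER :: mb_per_pa  = 0.01     ! 1 Pa = 0.01 mb
!    REAL, PARAMETER :: sec_per_3h = 10800.   ! 3 h in seconds
!    REAL :: pa_per_s, mb_per_3h
!
!    pa_per_s  = 1.0                          ! a 1 Pa/s surface pressure tendency
!    mb_per_3h = pa_per_s * mb_per_pa * sec_per_3h
!    PRINT *, 'conversion factor =', mb_per_3h   ! prints 108.0
! END PROGRAM check_dpsdt_units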