#include "cppdefs.h" #if defined NONLINEAR && !defined SOLVE3D SUBROUTINE main2d (RunInterval) ! !svn $Id: main2d.F 927 2018-10-16 03:51:56Z arango $ !======================================================================= ! Copyright (c) 2002-2019 The ROMS/TOMS Group ! ! Licensed under a MIT/X style license ! ! See License_ROMS.txt Hernan G. Arango ! !========================================== Alexander F. Shchepetkin === ! ! ! This subroutine is the main driver for nonlinear ROMS/TOMS when ! ! configurated as shallow water (barotropic) ocean model only. It ! ! advances forward the vertically integrated primitive equations ! ! for all nested grids, if any, by the specified time interval ! ! (seconds), RunInterval. ! ! ! !======================================================================= ! USE mod_param USE mod_parallel # if defined MODEL_COUPLING && defined MCT_LIB && !defined COAWST_MODEL USE mod_coupler # endif USE mod_iounits # ifdef NESTING USE mod_nesting # endif USE mod_scalars USE mod_stepping ! USE dateclock_mod, ONLY : time_string USE diag_mod, ONLY : diag # ifdef TLM_CHECK USE dotproduct_mod, ONLY : nl_dotproduct # endif # if defined W4DPSAS || defined NLM_OUTER || \ defined W4DPSAS_SENSITIVITY USE forcing_mod, ONLY : forcing # endif # ifdef ADJUST_WSTRESS USE frc_adjust_mod, ONLY : frc_adjust # endif USE ini_fields_mod, ONLY : ini_fields, ini_zeta # ifdef NESTING USE nesting_mod, ONLY : nesting # ifndef ONE_WAY USE nesting_mod, ONLY : do_twoway # endif # endif # if defined ADJUST_BOUNDARY USE obc_adjust_mod, ONLY : obc_adjust, load_obc # endif # ifdef AIR_OCEAN USE ocean_coupler_mod, ONLY : ocn2atm_coupling # endif # ifdef WAVES_OCEAN USE ocean_coupler_mod, ONLY : ocn2wav_coupling # endif # ifdef WEC_MELLOR USE radiation_stress_mod, ONLY : radiation_stress # endif # ifdef BULK_FLUXES2D # ifdef CCSM_FLUXES2D USE ccsm_flux_mod, ONLY : ccsm_flux # else USE bulk_flux_mod, ONLY : bulk_flux # endif # endif # ifdef AVERAGES USE set_avg_mod, ONLY : set_avg # endif # if defined SSH_TIDES || defined UV_TIDES USE set_tides_mod, ONLY : set_tides # endif USE set_vbc_mod, ONLY : set_vbc USE step2d_mod, ONLY : step2d # ifdef FLOATS USE step_floats_mod, ONLY : step_floats # endif USE strings_mod, ONLY : FoundError ! implicit none ! ! Imported variable declarations. ! real(dp), intent(in) :: RunInterval ! ! Local variable declarations. ! logical :: DoNestLayer, Time_Step integer :: Nsteps, Rsteps integer :: ig, il, istep, ng, nl, tile integer :: next_indx1 # ifdef FLOATS integer :: Lend, Lstr, chunk_size # endif ! !======================================================================= ! Time-step vertically integrated equations. !======================================================================= ! ! Time-step the 3D kernel for the specified time interval (seconds), ! RunInterval. ! Time_Step=.TRUE. DoNestLayer=.TRUE. ! KERNEL_LOOP : DO WHILE (Time_Step) ! ! In nesting applications, the number of nesting layers (NestLayers) is ! used to facilitate refinement grids and composite/refinament grids ! combinations. Otherwise, the solution it is looped once for a single ! grid application (NestLayers = 1). ! nl=0 #ifdef NESTING TwoWayInterval(1:Ngrids)=0.0_r8 #endif ! NEST_LAYER : DO WHILE (DoNestLayer) ! ! Determine number of time steps to compute in each nested grid layer ! based on the specified time interval (seconds), RunInterval. Non ! nesting applications have NestLayers=1. Notice that RunInterval is ! set in the calling driver. Its value may span the full period of the ! 
          CALL ntimesteps (iNLM, RunInterval, nl, Nsteps, Rsteps)
          IF (FoundError(exit_flag, NoError, __LINE__,                  &
     &                   __FILE__)) RETURN
          IF ((nl.le.0).or.(nl.gt.NestLayers)) EXIT
!
!  Time-step governing equations for Nsteps.
!
          STEP_LOOP : DO istep=1,Nsteps
!
!  Set time indices and time clock.
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              iic(ng)=iic(ng)+1
              time(ng)=time(ng)+dt(ng)
              tdays(ng)=time(ng)*sec2day
              CALL time_string (time(ng), time_code(ng))
              IF (step_counter(ng).eq.Rsteps) Time_Step=.FALSE.
            END DO

# if defined W4DPSAS || defined NLM_OUTER || \
     defined W4DPSAS_SENSITIVITY
!
!-----------------------------------------------------------------------
!  If appropriate, add convolved adjoint solution impulse forcing to
!  the nonlinear model solution.  Notice that the forcing is only
!  needed after finishing all the inner loops.  The forcing is
!  continuous.  That is, it is time interpolated at every time-step
!  from available snapshots (FrequentImpulse=TRUE).
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (FrequentImpulse(ng)) THEN
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL forcing (ng, tile, kstp(ng), nstp(ng))
                END DO
!$OMP BARRIER
              END IF
            END DO
# endif
!
!-----------------------------------------------------------------------
!  Read in required data, if any, from input NetCDF files.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
!$OMP MASTER
              CALL get_data (ng)
!$OMP END MASTER
!$OMP BARRIER
              IF (FoundError(exit_flag, NoError, __LINE__,              &
     &                       __FILE__)) RETURN
            END DO
!
!-----------------------------------------------------------------------
!  If applicable, process input data: time interpolate between data
!  snapshots.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              DO tile=first_tile(ng),last_tile(ng),+1
                CALL set_data (ng, tile)
              END DO
!$OMP BARRIER
            END DO
            IF (FoundError(exit_flag, NoError, __LINE__,                &
     &                     __FILE__)) RETURN
!
!-----------------------------------------------------------------------
!  Initialize all time levels and compute other initial fields.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (iic(ng).eq.ntstart(ng)) THEN
!
!  Initialize free-surface.
!
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL ini_zeta (ng, tile, iNLM)
                END DO
!$OMP BARRIER
!
!  Initialize other state variables.
!
                DO tile=last_tile(ng),first_tile(ng),-1
                  CALL ini_fields (ng, tile, iNLM)
                END DO
!$OMP BARRIER

# ifdef NESTING
!
!  Extract donor grid initial data at contact points and store it in
!  the REFINED structure so it can be used for the space-time
!  interpolation.
!
                IF (RefinedGrid(ng)) THEN
                  CALL nesting (ng, iNLM, ngetD)
                END IF
# endif
              END IF
            END DO
!
!-----------------------------------------------------------------------
!  Compute and report diagnostics.  If appropriate, accumulate
!  time-averaged output data, which needs an irreversible loop in
!  shared-memory jobs.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              DO tile=first_tile(ng),last_tile(ng),+1   ! irreversible
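!
!  The "irreversible" tag above means that, unlike most tile loops in
!  this routine, this loop's sweep direction must not be reversed:
!  per the note above, the time-average accumulation performed here
!  is order-sensitive in shared-memory jobs.
!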
# ifdef AVERAGES
#  ifdef FILTERED
                CALL set_filter (ng, tile)
#  else
                CALL set_avg (ng, tile)
#  endif
# endif
# ifdef DIAGNOSTICS
                CALL set_diags (ng, tile)
# endif
                CALL diag (ng, tile)
# ifdef TLM_CHECK
                CALL nl_dotproduct (ng, tile, Lnew(ng))
# endif
              END DO
!$OMP BARRIER
            END DO
            IF (FoundError(exit_flag, NoError, __LINE__,                &
     &                     __FILE__)) RETURN

# ifdef AIR_OCEAN
!
!-----------------------------------------------------------------------
!  Couple ocean to atmosphere model every "CoupleSteps(Iatmos)"
!  timesteps: get air/sea fluxes.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF ((iic(ng).ne.ntstart(ng)).and.                         &
     &            MOD(iic(ng),CoupleSteps(Iatmos,ng)).eq.0) THEN
                DO tile=last_tile(ng),first_tile(ng),-1
                  CALL ocn2atm_coupling (ng, tile)
                END DO
!$OMP BARRIER
              END IF
            END DO
# endif

# ifdef ADJUST_BOUNDARY
!
!-----------------------------------------------------------------------
!  Interpolate open boundary increments and adjust open boundary.
!  Load open boundary into storage arrays.  Skip the last output
!  timestep.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (iic(ng).lt.(ntend(ng)+1)) THEN
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL obc_adjust (ng, tile, Lbinp(ng))
                  CALL load_obc (ng, tile, Lbout(ng))
                END DO
!$OMP BARRIER
              END IF
            END DO
# endif

# ifdef ADJUST_WSTRESS
!
!-----------------------------------------------------------------------
!  Interpolate surface forcing increments and adjust surface forcing.
!  Load surface forcing to storage arrays.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (iic(ng).lt.(ntend(ng)+1)) THEN
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL frc_adjust (ng, tile, Lfinp(ng))
                END DO
!$OMP BARRIER
              END IF
            END DO
# endif

# ifdef WAVES_OCEAN
!
!-----------------------------------------------------------------------
!  Couple ocean to waves model every "CoupleSteps(Iwaves)"
!  timesteps: get waves/sea fluxes.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF ((iic(ng).ne.ntstart(ng)).and.                         &
     &            MOD(iic(ng)-1,CoupleSteps(Iwaves,ng)).eq.0) THEN
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL ocn2wav_coupling (ng, tile)
                END DO
!$OMP BARRIER
              END IF
            END DO
# endif

# ifdef WEC_MELLOR
!
!-----------------------------------------------------------------------
!  Compute radiation stress terms.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              DO tile=last_tile(ng),first_tile(ng),-1
                CALL radiation_stress (ng, tile)
              END DO
!$OMP BARRIER
            END DO
# endif
!
!-----------------------------------------------------------------------
!  Set vertical boundary conditions.  Process tidal forcing.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              DO tile=first_tile(ng),last_tile(ng),+1
# ifdef BULK_FLUXES2D
#  ifdef CCSM_FLUXES2D
                CALL ccsm_flux (ng, tile)
#  else
                CALL bulk_flux (ng, tile)
#  endif
# endif
                CALL set_vbc (ng, tile)
# if defined SSH_TIDES || defined UV_TIDES
                CALL set_tides (ng, tile)
# endif
              END DO
!$OMP BARRIER
            END DO

# ifdef NESTING
!
!  If composite or mosaic grids, process additional points in the
!  contact zone between connected grids for bottom stress variables.
!
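!  The third argument of "nesting" selects the task to perform; the
!  switches used in this routine (ngetD, nputD, nmflx, nbstr, n2dPS,
!  n2dCS, nmask, and n2way) are assumed to be the task identifiers
!  declared in mod_nesting.
!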
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (ANY(CompositeGrid(:,ng))) THEN
                CALL nesting (ng, iNLM, nbstr)
              END IF
            END DO
# endif
!
!-----------------------------------------------------------------------
!  If appropriate, write out fields into output NetCDF files.  Notice
!  that IO data is written in delayed and serial mode.  Exit if last
!  time step.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
!$OMP MASTER
              CALL output (ng)
!$OMP END MASTER
!$OMP BARRIER
              IF ((FoundError(exit_flag, NoError, __LINE__,             &
     &                        __FILE__)).or.                            &
     &            ((iic(ng).eq.(ntend(ng)+1)).and.(ng.eq.Ngrids))) THEN
                RETURN
              END IF
            END DO

# ifdef NESTING
!
!-----------------------------------------------------------------------
!  If refinement grid, interpolate (space, time) state variable
!  contact points from donor grid extracted data.
#  ifdef NESTING_DEBUG
!
!  Also, fill BRY_CONTACT(:,:)%Mflux to check for mass conservation
!  between coarse and fine grids.  This is only done for diagnostic
!  purposes.  Very verbose debugging output is also written to the
!  fort.300 file when NESTING_DEBUG is activated.
#  endif
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (RefinedGrid(ng).and.(RefineScale(ng).gt.0)) THEN
                CALL nesting (ng, iNLM, nputD)
#  ifdef NESTING_DEBUG
                CALL nesting (ng, iNLM, nmflx)
#  endif
              END IF
            END DO
# endif
!
!-----------------------------------------------------------------------
!  Solve the vertically integrated primitive equations for the
!  free-surface and momentum components.
!-----------------------------------------------------------------------
!
# ifndef OFFLINE_FLOATS
!
!  Set time indices for predictor step.  The PREDICTOR_2D_STEP switch
!  is assumed to be false before the first time-step.
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              iif(ng)=1
              nfast(ng)=1
              next_indx1=3-indx1(ng)
              IF (.not.PREDICTOR_2D_STEP(ng)) THEN
                PREDICTOR_2D_STEP(ng)=.TRUE.
                IF (FIRST_2D_STEP) THEN
                  kstp(ng)=indx1(ng)
                ELSE
                  kstp(ng)=3-indx1(ng)
                END IF
                knew(ng)=3
                krhs(ng)=indx1(ng)
              END IF
!
!  Predictor step - Advance barotropic equations using 2D time-step
!  ==============   predictor scheme.
!
              DO tile=last_tile(ng),first_tile(ng),-1
                CALL step2d (ng, tile)
              END DO
!$OMP BARRIER
            END DO

# ifdef NESTING
!
!  If composite or mosaic grids, process additional points in the
!  contact zone between connected grids for the state variables
!  associated with the 2D engine Predictor Step section.
!  If refinement, check mass flux conservation between coarse and
!  fine grids during debugging.
#  ifdef NESTING_DEBUG
!  Warning: very verbose output to the fort.300 ASCII file to check
!  mass flux conservation.
#  endif
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (ANY(CompositeGrid(:,ng))) THEN
                CALL nesting (ng, iNLM, n2dPS)
              END IF
              IF (RefinedGrid(ng).and.(RefineScale(ng).gt.0)) THEN
                CALL nesting (ng, iNLM, nmflx)
              END IF
            END DO
# endif
!
!  Set time indices for corrector step.
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (PREDICTOR_2D_STEP(ng)) THEN
                PREDICTOR_2D_STEP(ng)=.FALSE.
                knew(ng)=next_indx1
                kstp(ng)=3-knew(ng)
                krhs(ng)=3
                IF (iif(ng).lt.(nfast(ng)+1)) indx1(ng)=next_indx1
              END IF
!
!  Corrector step - Apply 2D time-step corrector scheme.  Notice that
!  ==============   there is no need for a corrector step during the
!                   auxiliary (nfast+1) time-step.
!
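!  In this barotropic-only configuration iif=1 and nfast=1 are set in
!  the predictor section above, so the test below is true and the
!  corrector is executed exactly once per time-step.
!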
              IF (iif(ng).lt.(nfast(ng)+1)) THEN
                DO tile=first_tile(ng),last_tile(ng),+1
                  CALL step2d (ng, tile)
                END DO
!$OMP BARRIER
              END IF
            END DO

# ifdef NESTING
#  if defined MASKING && defined WET_DRY
!
!  If nesting and wetting and drying, scale horizontal interpolation
!  weights to account for land/sea masking in contact areas.  This
!  needs to be done at every time-step since the land/sea masking is
!  time dependent.
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              CALL nesting (ng, iNLM, nmask)
            END DO
#  endif
!
!  If composite or mosaic grids, process additional points in the
!  contact zone between connected grids for the state variables
!  associated with the 2D engine Corrector Step section.
!  If refinement, check mass flux conservation between coarse and
!  fine grids during debugging.
#  ifdef NESTING_DEBUG
!  Warning: very verbose output to the fort.300 ASCII file to check
!  mass flux conservation.
#  endif
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (ANY(CompositeGrid(:,ng))) THEN
                CALL nesting (ng, iNLM, n2dCS)
              END IF
              IF (RefinedGrid(ng).and.(RefineScale(ng).gt.0)) THEN
                CALL nesting (ng, iNLM, nmflx)
              END IF
            END DO
# endif

# ifdef NESTING
#  ifndef ONE_WAY
!
!-----------------------------------------------------------------------
!  If refinement grids, perform two-way coupling between fine and
!  coarse grids.  Correct coarse grid tracer values at the refinement
!  grid with refined accumulated fluxes.  Then, replace coarse grid
!  state variables with averaged refined grid values (two-way
!  nesting).  Update coarse grid depth variables.
!
!  The two-way exchange of information between nested grids needs to
!  be done at the correct time-step and in the right sequence.
!-----------------------------------------------------------------------
!
            DO il=NestLayers,1,-1
              DO ig=1,GridsInLayer(il)
                ng=GridNumber(ig,il)
                IF (do_twoway(iNLM, nl, il, ng, istep)) THEN
                  CALL nesting (ng, iNLM, n2way)
                END IF
              END DO
            END DO
#  endif
!
!-----------------------------------------------------------------------
!  If donor to a finer grid, extract data for the external contact
!  points.  This is the latest solution for the coarser grid.
!
!  It is stored in the REFINED structure so it can be used for the
!  space-time interpolation.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (DonorToFiner(ng)) THEN
                CALL nesting (ng, iNLM, ngetD)
              END IF
            END DO
# endif
# endif

# ifdef FLOATS
!
!-----------------------------------------------------------------------
!  Compute Lagrangian drifter trajectories: split all the drifters
!  between the computational threads, except in distributed-memory
!  and serial configurations.  In distributed-memory, the parallel
!  node containing the drifter is selected internally since the state
!  variables do not have a global scope.
!-----------------------------------------------------------------------
!
            DO ig=1,GridsInLayer(nl)
              ng=GridNumber(ig,nl)
              IF (Lfloats(ng)) THEN
#  ifdef _OPENMP
                chunk_size=(Nfloats(ng)+numthreads-1)/numthreads
                Lstr=1+MyThread*chunk_size
                Lend=MIN(Nfloats(ng),Lstr+chunk_size-1)
#  else
                Lstr=1
                Lend=Nfloats(ng)
#  endif
                CALL step_floats (ng, Lstr, Lend)
!$OMP BARRIER
!
!  Shift floats time indices.
!
                nfp1(ng)=MOD(nfp1(ng)+1,NFT+1)
                nf  (ng)=MOD(nf  (ng)+1,NFT+1)
                nfm1(ng)=MOD(nfm1(ng)+1,NFT+1)
                nfm2(ng)=MOD(nfm2(ng)+1,NFT+1)
                nfm3(ng)=MOD(nfm3(ng)+1,NFT+1)
              END IF
            END DO
# endif
          END DO STEP_LOOP
        END DO NEST_LAYER
      END DO KERNEL_LOOP

      RETURN
      END SUBROUTINE main2d
#else
      SUBROUTINE main2d
      RETURN
      END SUBROUTINE main2d
#endif
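!
!  Usage sketch (for reference only; the exact statements in the
!  calling driver may differ):  the standard nonlinear driver invokes
!  this kernel from inside an OpenMP parallel region once per run
!  window,
!
!      !$OMP PARALLEL
!      CALL main2d (RunInterval)
!      !$OMP END PARALLEL
!
!  where RunInterval is the requested window in seconds (the entire
!  simulation, a coupling interval, or a single time-step).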