<p><b>mpetersen@lanl.gov</b> 2011-10-25 10:53:58 -0600 (Tue, 25 Oct 2011)</p><p>Fix boundary update for tracers. Before this fix, block boundaries were visible in the temperature field when using split-explicit timestepping with del4 diffusion.<br>
</p><hr noshade><pre><font color="gray">Modified: branches/ocean_projects/split_explicit_timestepping/src/core_ocean/mpas_ocn_time_integration_split.F
===================================================================
--- branches/ocean_projects/split_explicit_timestepping/src/core_ocean/mpas_ocn_time_integration_split.F        2011-10-25 16:36:05 UTC (rev 1127)
+++ branches/ocean_projects/split_explicit_timestepping/src/core_ocean/mpas_ocn_time_integration_split.F        2011-10-25 16:53:58 UTC (rev 1128)
@@ -822,15 +822,9 @@
end do ! block
- ! Check that you can compute SSH using the total sum or the individual increments
- ! over the barotropic subcycles.
- ! efficiency: This next block of code is really a check for debugging, and can
- ! be removed later.
block => domain % blocklist
do while (associated(block))
- allocate(uTemp(block % mesh % nVertLevels))
-
if (config_SSH_from=='avg_flux') then
! Accumulate fluxes in the tend % ssh variable
block % tend % ssh % array(:) = 0.0
@@ -873,6 +867,8 @@
ucorr_coef = 0
endif
+ allocate(uTemp(block % mesh % nVertLevels))
+
do iEdge=1,block % mesh % nEdges
cell1 = block % mesh % cellsOnEdge % array(1,iEdge)
cell2 = block % mesh % cellsOnEdge % array(2,iEdge)
@@ -955,10 +951,10 @@
call ocn_wtop(block % state % time_levs(2) % state, block % mesh)
if (trim(config_time_integration) == 'unsplit_explicit') then
- call ocn_tend_h(block % tend, block % state % time_levs(2) % state , block % diagnostics, block % mesh)
+ call ocn_tend_h(block % tend, block % state % time_levs(2) % state, block % diagnostics, block % mesh)
endif
- call ocn_tend_scalar(block % tend, block % state % time_levs(2) % state , block % diagnostics, block % mesh)
+ call ocn_tend_scalar(block % tend, block % state % time_levs(2) % state, block % diagnostics, block % mesh)
block => block % next
end do
@@ -967,16 +963,15 @@
block => domain % blocklist
do while (associated(block))
- call dmpar_exch_halo_field2dReal(domain % dminfo, block % tend % h % array(:,:), &
- block % mesh % nVertLevels, block % mesh % nCells, &
- block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
- call dmpar_exch_halo_field3dReal(domain % dminfo, block % tend % tracers % array(:,:,:), &
- block % tend % num_tracers, block % mesh % nVertLevels, block % mesh % nCells, &
- block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+ if (trim(config_time_integration) == 'unsplit_explicit') then
+ call dmpar_exch_halo_field2dReal(domain % dminfo, block % tend % h % array(:,:), &
+ block % mesh % nVertLevels, block % mesh % nCells, &
+ block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+ endif ! unsplit_explicit
+
block => block % next
end do
-
block => domain % blocklist
do while (associated(block))
allocate(hNew(block % mesh % nVertLevels))
@@ -1008,7 +1003,7 @@
! Only need T & S for earlier iterations,
! then all the tracers needed the last time through.
- if (split_explicit_step < config_n_ts_iter) then
+ if (split_explicit_step < config_n_ts_iter) then
hNew(:) = block % mesh % hZLevel % array(:)
do iCell=1,block % mesh % nCells
@@ -1034,16 +1029,15 @@
if (trim(config_time_integration) == 'unsplit_explicit') then
+ ! compute h*, which is h at n+1/2 and put into array hNew
+ ! on last iteration, hNew remains at n+1
+ do iCell=1,block % mesh % nCells
+ block % state % time_levs(2) % state % h % array(1,iCell) &
+ = 0.5*( &
+ block % state % time_levs(2) % state % h % array(1,iCell) &
+ + block % state % time_levs(1) % state % h % array(1,iCell) )
- ! compute h*, which is h at n+1/2 and put into array hNew
- ! on last iteration, hNew remains at n+1
- do iCell=1,block % mesh % nCells
- block % state % time_levs(2) % state % h % array(1,iCell) &
- = 0.5*( &
- block % state % time_levs(2) % state % h % array(1,iCell) &
- + block % state % time_levs(1) % state % h % array(1,iCell) )
-
- end do ! iCell
+ end do ! iCell
endif ! unsplit_explicit
! compute u*, the velocity for tendency terms. Put in uNew.
@@ -1056,13 +1050,8 @@
+ block % state % time_levs(2) % state % uBcl % array(:,iEdge)
end do ! iEdge
- ! mrp 110512 I really only need this to compute h_edge, density, pressure.
- ! I can par this down later.
- call ocn_diagnostic_solve(dt, block % state % time_levs(2) % state, block % mesh)
-
+ elseif (split_explicit_step == config_n_ts_iter) then
- elseif (split_explicit_step == config_n_ts_iter) then
-
hNew(:) = block % mesh % hZLevel % array(:)
do iCell=1,block % mesh % nCells
! sshNew is a pointer, defined above.
@@ -1080,12 +1069,38 @@
end do
end do
- endif ! split_explicit_step
+ endif ! split_explicit_step
deallocate(hNew)
block => block % next
end do
+
+ ! Boundary update on tracers. This is placed here, rather than
+ ! on tend % tracers as in RK4, because I needed to update
+ ! afterwards for the del4 diffusion operator.
+ block => domain % blocklist
+ do while (associated(block))
+ call dmpar_exch_halo_field3dReal(domain % dminfo, block % state % time_levs(2) % state % tracers % array(:,:,:), &
+ block % tend % num_tracers, block % mesh % nVertLevels, block % mesh % nCells, &
+ block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+ block => block % next
+ end do
+
+
+ if (split_explicit_step < config_n_ts_iter) then
+ ! mrp 110512 I really only need this to compute h_edge, density, pressure.
+ ! I can par this down later.
+ block => domain % blocklist
+ do while (associated(block))
+
+ call ocn_diagnostic_solve(dt, block % state % time_levs(2) % state, block % mesh)
+
+ block => block % next
+ end do
+ endif
+
+
end do ! split_explicit_step = 1, config_n_ts_iter
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! END large iteration loop
</font>
</pre>