croesch@ucar.edu  2011-12-09 17:05:56 -0700 (Fri, 09 Dec 2011)

BRANCH COMMIT

Updates to the halo exchange lists to allow updating by layer. Updated the time integration to use the new exchange lists and removed if-tests that are no longer needed.
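In outline: parinfo now carries a separate exchange list per halo layer (halo1/halo2 for cells; ownPerim/halo1/halo2 for edges), so a halo update can stop at the depth a field actually needs instead of always refreshing the full halo. A minimal sketch of the new call pattern, using field and list names from the diff below (the exchange routine signature itself is unchanged):

   ! Before: one list pair per element type, always updating the full halo.
   !   call mpas_dmpar_exch_halo_field2d_real(dminfo, block % diag % rw % array, &
   !                                          block % mesh % nVertLevels+1, block % mesh % nCells, &
   !                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)

   ! After: select the list pair for the depth this field needs (here, through halo layer 2).
   call mpas_dmpar_exch_halo_field2d_real(dminfo, block % diag % rw % array, &
                                          block % mesh % nVertLevels+1, block % mesh % nCells, &
                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)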
----------------------------------------------------------------------

Modified: branches/halo/src/core_nhyd_atmos/mpas_atm_mpas_core.F
===================================================================
--- branches/halo/src/core_nhyd_atmos/mpas_atm_mpas_core.F        2011-12-08 22:32:29 UTC (rev 1248)
+++ branches/halo/src/core_nhyd_atmos/mpas_atm_mpas_core.F        2011-12-10 00:05:56 UTC (rev 1249)
@@ -192,7 +192,7 @@
 
      call mpas_dmpar_exch_halo_field2d_real(dminfo, block % state % time_levs(1) % state % u % array(:,:), &
                                       block % mesh % nVertLevels, block % mesh % nEdges, &
-                                       block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                       block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
    
       if (.not. config_do_restart .or. (config_do_restart .and. config_do_DAcycling)) then
          call atm_init_coupled_diagnostics( block % state % time_levs(1) % state, block % diag, mesh)
@@ -201,13 +201,13 @@
 
      call mpas_dmpar_exch_halo_field2d_real(dminfo, block % diag % pv_edge % array(:,:), &
                                       block % mesh % nVertLevels, block % mesh % nEdges, &
-                                       block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                       block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
      call mpas_dmpar_exch_halo_field2d_real(dminfo, block % diag % ru % array(:,:), &
                                       block % mesh % nVertLevels, block % mesh % nEdges, &
-                                       block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                       block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
      call mpas_dmpar_exch_halo_field2d_real(dminfo, block % diag % rw % array, &
                                       block % mesh % nVertLevels+1, block % mesh % nCells,  &
-                                       block % parinfo % CellsToSend, block % parinfo % CellsToRecv)
+                                       block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
 
       call mpas_rbf_interp_initialize(mesh)
       call mpas_init_reconstruct(mesh)

Modified: branches/halo/src/core_nhyd_atmos/mpas_atm_time_integration.F
===================================================================
--- branches/halo/src/core_nhyd_atmos/mpas_atm_time_integration.F        2011-12-08 22:32:29 UTC (rev 1248)
+++ branches/halo/src/core_nhyd_atmos/mpas_atm_time_integration.F        2011-12-10 00:05:56 UTC (rev 1249)
@@ -132,23 +132,23 @@
 ! theta_m
         call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % state % time_levs(1) % state % theta_m % array(:,:), &
                                          block % mesh % nVertLevels, block % mesh % nCells, &
-                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
! scalars
         call mpas_dmpar_exch_halo_field3d_real(domain % dminfo, block % state % time_levs(1) % state % scalars % array(:,:,:), &
                                          block % state % time_levs(1) % state % num_scalars, block % mesh % nVertLevels, block % mesh % nCells, &
-                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
! pressure_p
         call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % pressure_p % array(:,:), &
                                          block % mesh % nVertLevels, block % mesh % nCells, &
-                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
! rtheta_p
         call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % rtheta_p % array(:,:), &
                                          block % mesh % nVertLevels, block % mesh % nCells, &
-                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
!surface_pressure
-         call mpas_dmpar_exch_halo_field1d_real(domain % dminfo, block % diag % surface_pressure % array(:), &
-                                          block % mesh % nCells, &
-                                          block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+!         call mpas_dmpar_exch_halo_field1d_real(domain % dminfo, block % diag % surface_pressure % array(:), &
+!                                          block % mesh % nCells, &
+!                                          block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
         block => block % next
       end do
 
@@ -191,7 +191,7 @@
         if (debug) write(0,*) ' add physics tendencies '
        block => domain % blocklist
        do while (associated(block))
-           call physics_addtend( domain % dminfo , block % parinfo % cellsToSend, block % parinfo % cellsToRecv, &
+           call physics_addtend( domain % dminfo , block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv, &
                        block % mesh , block % state % time_levs(2) % state, block % diag, block % tend, &
                        block % tend_physics , block % state % time_levs(2) % state % rho_zz % array(:,:), &
                         block % diag % rho_edge % array(:,:) )
@@ -210,14 +210,14 @@
 ! tend_u
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % tend % u % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nEdges, &
-                                             block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                             block % parinfo % ownPerimEdgesToSend, block % parinfo % ownPerimEdgesToRecv)
            block => block % next
         end do
 
         block => domain % blocklist
            do while (associated(block))
               call atm_set_smlstep_pert_variables( block % state % time_levs(1) % state, block % state % time_levs(2) % state,  &
-                                                block % tend, block % diag, block % mesh                                     )
+                                                block % tend, block % diag, block % mesh )
                call atm_compute_vert_imp_coefs( block % state % time_levs(2) % state, block % mesh, block % diag, rk_sub_timestep(rk_step) )
            block => block % next
         end do
@@ -229,7 +229,7 @@
            block => domain % blocklist
            do while (associated(block))
               call atm_advance_acoustic_step( block % state % time_levs(2) % state, block % diag, block % tend, &
-                                           block % mesh, rk_sub_timestep(rk_step)                            )
+                                           block % mesh, rk_sub_timestep(rk_step) )
               block => block % next
             end do
 
@@ -244,7 +244,7 @@
 ! rtheta_pp
               call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % rtheta_pp % array(:,:), &
                                                block % mesh % nVertLevels, block % mesh % nCells, &
-                                                block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                                block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
               block => block % next
             end do
  
@@ -266,16 +266,16 @@
 ! rw_p
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % rw_p % array(:,:), &
                                             block % mesh % nVertLevels+1, block % mesh % nCells, &
-                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
! MGD seems necessary
! ru_p
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % ru_p % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nEdges, &
-                                             block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                             block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
! rho_pp
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % rho_pp % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nCells, &
-                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
            block => block % next
          end do
 
@@ -295,7 +295,7 @@
 ! u
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % state % time_levs(2) % state % u % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nEdges, &
-                                             block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                             block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
            block => block % next
          end do
 
@@ -320,7 +320,7 @@
                                          block % state % time_levs(1) % state, block % state % time_levs(2) % state, &
                                          block % diag, &
                                          block % mesh, rk_timestep(rk_step), rk_step, 3,             &
-                                          domain % dminfo, block % parinfo % cellsToSend, block % parinfo % cellsToRecv )
+                                          domain % dminfo, block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv )
            end if
            block => block % next
          end do
@@ -330,10 +330,10 @@
 !         do while (associated(block))
!            call mpas_dmpar_exch_halo_field3d_real(domain % dminfo, block % tend % scalars % array(:,:,:), &
!                                             block % tend % num_scalars, block % mesh % nVertLevels, block % mesh % nCells, &
-!                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+!                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
!            call mpas_dmpar_exch_halo_field3d_real(domain % dminfo, block % state % time_levs(2) % state % scalars % array(:,:,:), &
!                                             block % state % time_levs(2) % state % num_scalars, block % mesh % nVertLevels, block % mesh % nCells, &
-!                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+!                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
!            block => block % next
 !         end do
 
@@ -367,22 +367,22 @@
 ! w
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % state % time_levs(2) % state % w % array(:,:), &
                                             block % mesh % nVertLevels+1, block % mesh % nCells, &
-                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
! pv_edge
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % pv_edge % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nEdges, &
-                                             block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                             block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
! rho_edge
            call mpas_dmpar_exch_halo_field2d_real(domain % dminfo, block % diag % rho_edge % array(:,:), &
                                             block % mesh % nVertLevels, block % mesh % nEdges, &
-                                             block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+                                             block % parinfo % halo2EdgesToSend, block % parinfo % halo2EdgesToRecv)
 
!  ****  this will always be needed - perhaps we can cover this with compute_solve_diagnostics
 
! scalars
            call mpas_dmpar_exch_halo_field3d_real(domain % dminfo, block % state % time_levs(2) % state % scalars % array(:,:,:), &
                                             block % state % time_levs(2) % state % num_scalars, block % mesh % nVertLevels, block % mesh % nCells, &
-                                             block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+                                             block % parinfo % halo2CellsToSend, block % parinfo % halo2CellsToRecv)
            block => block % next
          end do
 
@@ -766,7 +766,7 @@
       real (kind=RKIND) :: cf1, cf2, cf3, pr, pl
       integer :: kr, kl
 
-      integer :: nEdges, nCells, nCellsSolve, nVertLevels
+      integer :: nOwnCellEdges, nCells, nCellsSolve, nVertLevels
 
       logical, parameter :: debug = .false.
 !      logical, parameter :: debug = .true.
@@ -818,7 +818,7 @@
 
 !  might these be pointers instead? **************************
 
-      nEdges = grid % nEdges
+      nOwnCellEdges = grid % nEdgesWithinHaloPerimeter(1)
       nCells = grid % nCells
       nCellsSolve = grid % nCellsSolve
       nVertLevels = grid % nVertLevels
@@ -846,13 +846,12 @@
 
       if(debug) write(0,*) ' updating ru_p '
 
-      do iEdge = 1, nEdges
+      do iEdge = 1, nOwnCellEdges ! update edge for block-owned cells
  
          cell1 = grid % cellsOnEdge % array (1,iEdge)
          cell2 = grid % cellsOnEdge % array (2,iEdge)
-         ! update edge for block-owned cells
-         if (cell1 <= grid % nCellsSolve .or. cell2 <= grid % nCellsSolve ) then
 
+
             if (newpx) then
 
                k = 1
@@ -933,10 +932,8 @@
 
             end do
 
-        end if ! end test for block-owned cells
+      end do ! end loop over own cell edges
 
-      end do ! end loop over edges
-
       ! saving rtheta_pp before update for use in divergence damping in next acoustic step
       rtheta_pp_old(:,:) = rtheta_pp(:,:)
 
@@ -1236,6 +1233,8 @@
 
       integer, parameter :: hadv_opt = 2
 
+      integer :: nOwnCellEdges
+
      flux4(q_im2, q_im1, q_i, q_ip1, ua) =                     &
           ua*( 7.*(q_i + q_im1) - (q_ip1 + q_im2) )/12.0
 
@@ -1283,38 +1282,38 @@
       scalar_tend = 0.  !  testing purposes - we have no sources or sinks
 #endif
 
+      nOwnCellEdges = grid % nEdgesWithinHaloPerimeter(1)
+
       !
       ! Runge Kutta integration, so we compute fluxes from scalar_new values, update starts form scalar_old
       !
       !
       !  horizontal flux divergence, accumulate in scalar_tend
 
-         do iEdge=1,grid%nEdges
+         do iEdge=1, nOwnCellEdges
             cell1 = cellsOnEdge(1,iEdge)
             cell2 = cellsOnEdge(2,iEdge)
-            if (cell1 <= grid%nCellsSolve .or. cell2 <= grid%nCellsSolve) then  ! only for owned cells
-  
-               flux_arr(:,:) = 0.
-               do i=1,nAdvCellsForEdge(iEdge)
-                 iCell = advCellsForEdge(i,iEdge)
-                 do k=1,grid % nVertLevels
-                   scalar_weight = adv_coefs(i,iEdge) + coef_3rd_order*sign(1.,uhAvg(k,iEdge))*adv_coefs_3rd(i,iEdge)
-                   do iScalar=1,s_old % num_scalars
+
+            flux_arr(:,:) = 0.
+            do i=1,nAdvCellsForEdge(iEdge)
+               iCell = advCellsForEdge(i,iEdge)
+               do k=1,grid % nVertLevels
+                  scalar_weight = adv_coefs(i,iEdge) + coef_3rd_order*sign(1.,uhAvg(k,iEdge))*adv_coefs_3rd(i,iEdge)
+                  do iScalar=1,s_old % num_scalars
                      flux_arr(iScalar,k) = flux_arr(iScalar,k) + scalar_weight* scalar_new(iScalar,k,iCell)
-                   end do
-                 end do
+                  end do
                end do
+            end do
 
-               do k=1,grid % nVertLevels
+            do k=1,grid % nVertLevels
                do iScalar=1,s_old % num_scalars
                     scalar_tend(iScalar,k,cell1) = scalar_tend(iScalar,k,cell1) &
                            - uhAvg(k,iEdge)*flux_arr(iScalar,k)/areaCell(cell1)
                     scalar_tend(iScalar,k,cell2) = scalar_tend(iScalar,k,cell2) &
                             + uhAvg(k,iEdge)*flux_arr(iScalar,k)/areaCell(cell2)
                end do
-               end do
+            end do
 
-            end if
           end do
 
       !
@@ -1466,6 +1465,9 @@
       real (kind=RKIND), parameter :: eps=1.e-20
       logical, parameter :: debug_print = .false.
 
+      integer :: nOwnCellEdges
+
+
      flux4(q_im2, q_im1, q_i, q_ip1, ua) =                     &
           ua*( 7.*(q_i + q_im1) - (q_ip1 + q_im2) )/12.0
 
@@ -1513,6 +1515,8 @@
       scalar_tend = 0.  !  testing purposes - we have no sources or sinks
 #endif
 
+      nOwnCellEdges = grid % nEdgesWithinHaloPerimeter(1)
+
       !
       ! Update scalars for physics (i.e., what is in scalar_tend)
       !   we should probably move this to another routine called before mono advection ****
@@ -1623,23 +1627,19 @@
       !  horizontal flux divergence
 
          flux_arr(:,:) = 0.
-         do iEdge=1,grid%nEdges
+         do iEdge=1, nOwnCellEdges
 
             cell1 = cellsOnEdge(1,iEdge)
             cell2 = cellsOnEdge(2,iEdge)
-
-            if (cell1 <= grid%nCellsSolve .or. cell2 <= grid%nCellsSolve) then  ! only for owned cells
   
-               do i=1,nAdvCellsForEdge(iEdge)
-                 iCell = advCellsForEdge(i,iEdge)
-                 do k=1,grid % nVertLevels
-                   scalar_weight = uhAvg(k,iEdge)*(adv_coefs(i,iEdge) + coef_3rd_order*sign(1.,uhAvg(k,iEdge))*adv_coefs_3rd(i,iEdge))
-                   flux_arr(k,iEdge) = flux_arr(k,iEdge) + scalar_weight* scalar_new(k,iCell)
-                 end do
+            do i=1,nAdvCellsForEdge(iEdge)
+               iCell = advCellsForEdge(i,iEdge)
+               do k=1,grid % nVertLevels
+                  scalar_weight = uhAvg(k,iEdge)*(adv_coefs(i,iEdge) + coef_3rd_order*sign(1.,uhAvg(k,iEdge))*adv_coefs_3rd(i,iEdge))
+                  flux_arr(k,iEdge) = flux_arr(k,iEdge) + scalar_weight* scalar_new(k,iCell)
                end do
+            end do
 
-            end if
-
           end do
 
 !  vertical flux divergence for upwind update, we will put upwind update into scalar_new, and put factor of dt in fluxes
@@ -1671,24 +1671,23 @@
 
          !  upwind flux computation
 
-         do iEdge=1,grid%nEdges
-           cell1 = cellsOnEdge(1,iEdge)
-           cell2 = cellsOnEdge(2,iEdge)
-             if (cell1 <= grid%nCellsSolve .or. cell2 <= grid%nCellsSolve) then  ! only for owned cells
-               do k=1,grid % nVertLevels
-                 flux_upwind = grid % dvEdge % array(iEdge) * dt *   &
+         do iEdge=1, nOwnCellEdges
+            cell1 = cellsOnEdge(1,iEdge)
+            cell2 = cellsOnEdge(2,iEdge)
+
+            do k=1,grid % nVertLevels
+               flux_upwind = grid % dvEdge % array(iEdge) * dt *   &
                         (max(0.,uhAvg(k,iEdge))*scalar_old(k,cell1) + min(0.,uhAvg(k,iEdge))*scalar_old(k,cell2))
-                 flux_arr(k,iEdge) = dt*flux_arr(k,iEdge) - flux_upwind
-                 scalar_new(k,cell1) = scalar_new(k,cell1) - flux_upwind / areaCell(cell1)
-                 scalar_new(k,cell2) = scalar_new(k,cell2) + flux_upwind / areaCell(cell2)
+               flux_arr(k,iEdge) = dt*flux_arr(k,iEdge) - flux_upwind
+               scalar_new(k,cell1) = scalar_new(k,cell1) - flux_upwind / areaCell(cell1)
+               scalar_new(k,cell2) = scalar_new(k,cell2) + flux_upwind / areaCell(cell2)
 
-                 scale_out(k,cell1) = scale_out(k,cell1) - max(0.,flux_arr(k,iEdge)) / areaCell(cell1)
-                 scale_in (k,cell1) = scale_in (k,cell1) - min(0.,flux_arr(k,iEdge)) / areaCell(cell1)
-                 scale_out(k,cell2) = scale_out(k,cell2) + min(0.,flux_arr(k,iEdge)) / areaCell(cell2)
-                 scale_in (k,cell2) = scale_in (k,cell2) + max(0.,flux_arr(k,iEdge)) / areaCell(cell2)
+               scale_out(k,cell1) = scale_out(k,cell1) - max(0.,flux_arr(k,iEdge)) / areaCell(cell1)
+               scale_in (k,cell1) = scale_in (k,cell1) - min(0.,flux_arr(k,iEdge)) / areaCell(cell1)
+               scale_out(k,cell2) = scale_out(k,cell2) + min(0.,flux_arr(k,iEdge)) / areaCell(cell2)
+               scale_in (k,cell2) = scale_in (k,cell2) + max(0.,flux_arr(k,iEdge)) / areaCell(cell2)
 
-               end do
-             end if
+            end do
           end do
 
 !  next, the limiter
@@ -1723,17 +1722,16 @@
 !
 !  rescale the fluxes
 !
-            do iEdge = 1, grid % nEdges
+            do iEdge = 1, nOwnCellEdges
                cell1 = grid % cellsOnEdge % array(1,iEdge)
                cell2 = grid % cellsOnEdge % array(2,iEdge)
-               if (cell1 <= grid%nCellsSolve .or. cell2 <= grid%nCellsSolve) then
-                  do k = 1, nVertLevels
-                     flux = flux_arr(k,iEdge)
-                     flux = max(0.,flux) * min(scale_out(k,cell1), scale_in(k,cell2)) &
-                          + min(0.,flux) * min(scale_in(k,cell1), scale_out(k,cell2))
-                     flux_arr(k,iEdge) = flux
-                  end do
-               end if
+
+               do k = 1, nVertLevels
+                  flux = flux_arr(k,iEdge)
+                  flux = max(0.,flux) * min(scale_out(k,cell1), scale_in(k,cell2)) &
+                       + min(0.,flux) * min(scale_in(k,cell1), scale_out(k,cell2))
+                  flux_arr(k,iEdge) = flux
+               end do
             end do
  
        ! rescale the vertical flux
@@ -1749,15 +1747,13 @@
 !
 !  do the scalar update now that we have the fluxes
 !
-         do iEdge=1,grid%nEdges
+         do iEdge=1,nOwnCellEdges
             cell1 = cellsOnEdge(1,iEdge)
             cell2 = cellsOnEdge(2,iEdge)
-            if (cell1 <= grid%nCellsSolve .or. cell2 <= grid%nCellsSolve) then  ! only for owned cells
-               do k=1,grid % nVertLevels
-                  scalar_new(k,cell1) = scalar_new(k,cell1) - flux_arr(k,iEdge)/areaCell(cell1)
-                  scalar_new(k,cell2) = scalar_new(k,cell2) + flux_arr(k,iEdge)/areaCell(cell2)
-               end do
-            end if
+            do k=1,grid % nVertLevels
+               scalar_new(k,cell1) = scalar_new(k,cell1) - flux_arr(k,iEdge)/areaCell(cell1)
+               scalar_new(k,cell2) = scalar_new(k,cell2) + flux_arr(k,iEdge)/areaCell(cell2)
+            end do
          end do
 
           do iCell=1,grid % nCellsSolve

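A note on the loop changes in mpas_atm_time_integration.F above: the per-edge ownership tests are replaced by a precomputed loop bound, which relies on this branch's edge ordering, i.e. the first grid % nEdgesWithinHaloPerimeter(1) entries are exactly the edges adjacent to block-owned cells. A sketch of the before/after pattern:

   ! Old pattern: visit every edge and guard each one with an ownership test.
   ! do iEdge = 1, grid % nEdges
   !    cell1 = cellsOnEdge(1,iEdge)
   !    cell2 = cellsOnEdge(2,iEdge)
   !    if (cell1 <= grid % nCellsSolve .or. cell2 <= grid % nCellsSolve) then
   !       ! ... flux updates for this edge ...
   !    end if
   ! end do

   ! New pattern: the edge ordering guarantees ownership, so the if-test goes away.
   nOwnCellEdges = grid % nEdgesWithinHaloPerimeter(1)
   do iEdge = 1, nOwnCellEdges
      cell1 = cellsOnEdge(1,iEdge)
      cell2 = cellsOnEdge(2,iEdge)
      ! ... flux updates for this edge ...
   end do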
Modified: branches/halo/src/framework/mpas_dmpar.F
===================================================================
--- branches/halo/src/framework/mpas_dmpar.F        2011-12-08 22:32:29 UTC (rev 1248)
+++ branches/halo/src/framework/mpas_dmpar.F        2011-12-10 00:05:56 UTC (rev 1249)
@@ -592,6 +592,9 @@
       allocate(ownerListIn(totalSize))
       allocate(ownerListOut(totalSize))
 
+      ownerListIn(:) = 0
+      ownerListOut(:) = 0
+
       nMesgRecv = nNeededList
       ownerListIn(1:nNeededList) = neededList(1:nNeededList)
 

Modified: branches/halo/src/framework/mpas_grid_types.F
===================================================================
--- branches/halo/src/framework/mpas_grid_types.F        2011-12-08 22:32:29 UTC (rev 1248)
+++ branches/halo/src/framework/mpas_grid_types.F        2011-12-10 00:05:56 UTC (rev 1249)
@@ -102,10 +102,18 @@
 
    ! Type for storing (possibly architecture specific) information concerning to parallelism
    type parallel_info
-      type (exchange_list), pointer :: cellsToSend            ! List of types describing which cells to send to other blocks
-      type (exchange_list), pointer :: cellsToRecv            ! List of types describing which cells to receive from other blocks
-      type (exchange_list), pointer :: edgesToSend            ! List of types describing which edges to send to other blocks
-      type (exchange_list), pointer :: edgesToRecv            ! List of types describing which edges to receive from other blocks
+      type (exchange_list), pointer :: halo1CellsToSend            ! List of types describing which cells to send to other blocks
+      type (exchange_list), pointer :: halo1CellsToRecv            ! List of types describing which cells to receive from other blocks
+      type (exchange_list), pointer :: halo2CellsToSend            ! List of types describing which cells to send to other blocks
+      type (exchange_list), pointer :: halo2CellsToRecv            ! List of types describing which cells to receive from other blocks
+
+      type (exchange_list), pointer :: ownPerimEdgesToSend            ! List of types describing which edges to send to other blocks
+      type (exchange_list), pointer :: ownPerimEdgesToRecv            ! List of types describing which edges to receive from other blocks
+      type (exchange_list), pointer :: halo1EdgesToSend            ! List of types describing which edges to send to other blocks
+      type (exchange_list), pointer :: halo1EdgesToRecv            ! List of types describing which edges to receive from other blocks
+      type (exchange_list), pointer :: halo2EdgesToSend            ! List of types describing which edges to send to other blocks
+      type (exchange_list), pointer :: halo2EdgesToRecv            ! List of types describing which edges to receive from other blocks
+
       type (exchange_list), pointer :: verticesToSend         ! List of types describing which vertices to send to other blocks
       type (exchange_list), pointer :: verticesToRecv         ! List of types describing which vertices to receive from other blocks
    end type parallel_info

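For callers, the layered lists in parallel_info are selected by the halo depth an update needs. A hypothetical convenience wrapper, not part of this commit and named here purely for illustration, might look like:

   ! Hypothetical helper (not in the source): dispatch a cell-field halo update
   ! to the list pair for the requested depth (1 or 2).
   subroutine exch_cells_to_depth(dminfo, field, nVertLevels, nCells, parinfo, haloDepth)
      type (dm_info), intent(in) :: dminfo
      real (kind=RKIND), dimension(:,:), intent(inout) :: field
      integer, intent(in) :: nVertLevels, nCells, haloDepth
      type (parallel_info), intent(inout) :: parinfo

      if (haloDepth == 1) then
         call mpas_dmpar_exch_halo_field2d_real(dminfo, field, nVertLevels, nCells, &
                                                parinfo % halo1CellsToSend, parinfo % halo1CellsToRecv)
      else
         call mpas_dmpar_exch_halo_field2d_real(dminfo, field, nVertLevels, nCells, &
                                                parinfo % halo2CellsToSend, parinfo % halo2CellsToRecv)
      end if
   end subroutine exch_cells_to_depth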
Modified: branches/halo/src/framework/mpas_io_input.F
===================================================================
--- branches/halo/src/framework/mpas_io_input.F        2011-12-08 22:32:29 UTC (rev 1248)
+++ branches/halo/src/framework/mpas_io_input.F        2011-12-10 00:05:56 UTC (rev 1249)
@@ -535,7 +535,7 @@
       allocate(nEdgesOnCell_2Halo(block_graph_2Halo % nVerticesTotal))
       allocate(edgesOnCell_2Halo(maxEdges, block_graph_2Halo % nVerticesTotal))
       allocate(verticesOnCell_2Halo(maxEdges, block_graph_2Halo % nVerticesTotal))
-   
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                size(indexToCellIDField % array), block_graph_2Halo % nVerticesTotal, &
                                indexToCellIDField % array, block_graph_2Halo % vertexID, &
@@ -561,12 +561,12 @@
                                            edgesOnCell_2Halo, nlocal_edges, local_edge_list)
      call mpas_block_decomp_all_edges_in_block(maxEdges, block_graph_2Halo % nVerticesTotal, block_graph_2Halo % nAdjacent, &
                                            verticesOnCell_2Halo, nlocal_vertices, local_vertex_list)
-   
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                size(indexToEdgeIDField % array), nlocal_edges, &
                                indexToEdgeIDField % array, local_edge_list, &
                                sendEdgeList, recvEdgeList)
-   
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                size(indexToVertexIDField % array), nlocal_vertices, &
                                indexToVertexIDField % array, local_vertex_list, &
@@ -676,7 +676,7 @@
                                size(indexToEdgeIDField % array), nlocal_edges, &
                                indexToEdgeIDField % array, local_edge_list, &
                                sendEdgeList, recvEdgeList)
-   
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                size(indexToVertexIDField % array), nlocal_vertices, &
                                indexToVertexIDField % array, local_vertex_list, &
@@ -735,7 +735,7 @@
                                size(indexToEdgeIDField % array), nlocal_edges, &
                                indexToEdgeIDField % array, local_edge_list, &
                                sendEdgeList, recvEdgeList)
-   
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                size(indexToVertexIDField % array), nlocal_vertices, &
                                indexToVertexIDField % array, local_vertex_list, &
@@ -998,17 +998,34 @@
       !
       ! Work out halo exchange lists for cells, edges, and vertices
       !
+
      call mpas_dmpar_get_owner_list(domain % dminfo, &
-                                block_graph_2Halo % nVertices, block_graph_2Halo % nVerticesTotal, &
+                                block_graph_2Halo % nVertices, nCellsHalo(2), &
                                block_graph_2Halo % vertexID(1:block_graph_2Halo % nVertices), block_graph_2Halo % vertexID, &
-                                domain % blocklist % parinfo % cellsToSend, domain % blocklist % parinfo % cellsToRecv)
+                                domain % blocklist % parinfo % halo1CellsToSend, domain % blocklist % parinfo % halo1CellsToRecv)
 
      call mpas_dmpar_get_owner_list(domain % dminfo, &
-                                ghostEdgeStart-1, nlocal_edges, &
+                                block_graph_2Halo % nVertices, nCellsHalo(3), &
+                                block_graph_2Halo % vertexID(1:block_graph_2Halo % nVertices), block_graph_2Halo % vertexID, &
+                                domain % blocklist % parinfo % halo2CellsToSend, domain % blocklist % parinfo % halo2CellsToRecv)
+
+
+      call mpas_dmpar_get_owner_list(domain % dminfo, &
+                                ghostEdgeStart-1, nEdgesWithinHaloPerimeter(1), &
                                local_edge_list(1:ghostEdgeStart-1), local_edge_list, &
-                                domain % blocklist % parinfo % edgesToSend, domain % blocklist % parinfo % edgesToRecv)
+                                domain % blocklist % parinfo % ownPerimEdgesToSend, domain % blocklist % parinfo % ownPerimEdgesToRecv)
 
      call mpas_dmpar_get_owner_list(domain % dminfo, &
+                                ghostEdgeStart-1, nEdgesWithinHaloPerimeter(2), &
+                                local_edge_list(1:ghostEdgeStart-1), local_edge_list, &
+                                domain % blocklist % parinfo % halo1EdgesToSend, domain % blocklist % parinfo % halo1EdgesToRecv)
+
+      call mpas_dmpar_get_owner_list(domain % dminfo, &
+                                ghostEdgeStart-1, nEdgesWithinHaloPerimeter(3), &
+                                local_edge_list(1:ghostEdgeStart-1), local_edge_list, &
+                                domain % blocklist % parinfo % halo2EdgesToSend, domain % blocklist % parinfo % halo2EdgesToRecv)
+
+      call mpas_dmpar_get_owner_list(domain % dminfo, &
                                ghostVertexStart-1, nlocal_vertices, &
                                local_vertex_list(1:ghostVertexStart-1), local_vertex_list, &
                                 domain % blocklist % parinfo % verticesToSend, domain % blocklist % parinfo % verticesToRecv)
@@ -1022,30 +1039,6 @@
       domain % blocklist % mesh % nEdgesWithinHaloPerimeter = nEdgesWithinHaloPerimeter
 
 
-!CR:TODO: REMOVE THIS TEST CODE:
-!----------- Test Code ----------------
-
- if(1 == 2) then
-      write(*,*) nCellsHalo(1)
-      write(*,*) nCellsHalo(2)
-      write(*,*) nCellsHalo(3)
-
-      do i=1, nCellsHalo(3)
-         write(*,*) domain % blocklist % mesh % indexToCellID % array(i)
-      end do
- else
-      write(*,*) nOwnEdges
-      write(*,*) nEdgesWithinHaloPerimeter(1)
-      write(*,*) nEdgesWithinHaloPerimeter(2)
-      write(*,*) nEdgesWithinHaloPerimeter(3)
-
-      do i=1, nEdgesWithinHaloPerimeter(3)
-         write(*,*) domain % blocklist % mesh % indexToEdgeID % array(i)
-      end do
- end if
-!-------------------------------------
-
-
       !
       ! Deallocate fields, graphs, and other memory
       !
