<p><b>mpetersen@lanl.gov</b> 2010-10-19 15:01:25 -0600 (Tue, 19 Oct 2010)</p><p>Add maxLevelEdge and maxLevelVertex to the Registry, initialize them in the new subroutine mpas_init_domain, and add dmpar_exch_halo_field subroutines for integer fields.<br>
</p><hr noshade><pre><font color="gray">Modified: branches/ocean_projects/topography2_mrp/src/core_ocean/Registry
===================================================================
--- branches/ocean_projects/topography2_mrp/src/core_ocean/Registry        2010-10-19 17:51:22 UTC (rev 569)
+++ branches/ocean_projects/topography2_mrp/src/core_ocean/Registry        2010-10-19 21:01:25 UTC (rev 570)
@@ -119,11 +119,12 @@
var persistent real coeffs_reconstruct ( R3 maxEdges nCells ) 0 - coeffs_reconstruct mesh - -
# Arrays for z-level version of mpas-ocean
-var persistent integer maxLevelsCell ( nCells ) 0 iro kmaxCell mesh - -
-var persistent integer maxLevelsEdge ( nEdges ) 0 iro kmaxEdge mesh - -
+var persistent integer maxLevelCell ( nCells ) 0 iro maxLevelCell mesh - -
+var persistent integer maxLevelEdge ( nEdges ) 0 - maxLevelEdge mesh - -
+var persistent integer maxLevelVertex ( nVertices ) 0 - maxLevelVertex mesh - -
var persistent real hZLevel ( nVertLevels ) 0 iro hZLevel mesh - -
-var persistent real zMidZLevel ( nVertLevels ) 0 iro zMidZLevel mesh - -
-var persistent real zTopZLevel ( nVertLevelsP1 ) 0 iro zTopZLevel mesh - -
+var persistent real zMidZLevel ( nVertLevels ) 0 - zMidZLevel mesh - -
+var persistent real zTopZLevel ( nVertLevelsP1 ) 0 - zTopZLevel mesh - -
# Boundary conditions: read from input, saved in restart and written to output
var persistent integer boundaryEdge ( nVertLevels nEdges ) 0 iro boundaryEdge mesh - -
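
A note on the Registry flag change (an inference from the surrounding entries, not documented in this diff): the column carrying `iro` appears to mark fields that are read from input and carried through restart and output, so switching maxLevelEdge, maxLevelVertex, zMidZLevel, and zTopZLevel from `iro` to `-` is consistent with those fields now being derived at startup in mpas_init_domain rather than read from grid.nc. A hypothetical entry for a derived integer mesh field would follow the same pattern (myDerivedField is illustrative only):

var persistent integer myDerivedField ( nCells ) 0 - myDerivedField mesh - -
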
Modified: branches/ocean_projects/topography2_mrp/src/core_ocean/module_test_cases.F
===================================================================
--- branches/ocean_projects/topography2_mrp/src/core_ocean/module_test_cases.F        2010-10-19 17:51:22 UTC (rev 569)
+++ branches/ocean_projects/topography2_mrp/src/core_ocean/module_test_cases.F        2010-10-19 21:01:25 UTC (rev 570)
@@ -34,6 +34,9 @@
real (kind=RKIND) :: centerx, centery
integer :: nCells, nEdges, nVertices, nVertLevels
+ integer, dimension(:), pointer :: &
+ maxLevelCell, maxLevelEdge, maxLevelVertex
+
if (config_test_case == 0) then
write(0,*) 'Using initial conditions supplied in input file'
@@ -128,21 +131,19 @@
! These should eventually be in an input file. For now
! I just read them in from h(:,1).
hZLevel = h(:,1)
+
+ ! hZLevel should be in the grid.nc and restart.nc files.
+ ! The following lines are now in mpas_interface and should be
+ ! removed from here. mrp 101019
zTopZLevel(1) = 0.0
do iLevel = 1,nVertLevels
zMidZLevel(iLevel) = zTopZLevel(iLevel)-0.5*hZLevel(iLevel)
zTopZLevel(iLevel+1) = zTopZLevel(iLevel)- hZLevel(iLevel)
enddo
- if (config_vert_grid_type.eq.'isopycnal') then
- print *, ' Using isopycnal coordinates'
- elseif (config_vert_grid_type.eq.'zlevel') then
- print *, ' Using z-level coordinates'
- else
- print *, ' Incorrect choice of config_vert_grid_type:',&
- config_vert_grid_type
- call dmpar_abort(dminfo)
- endif
+ endif
+
+ if (config_vert_grid_type.eq.'zlevel') then
! Set tracers, if not done in grid.nc file
!tracers = 0.0
centerx = 1.0e6
@@ -156,12 +157,12 @@
! tracers(block_ptr % state % time_levs(1) % state % index_salinity,iLevel,iCell) = 1.4 + iLevel*0.6 ! salinity
! for x3, 25 layer test
- !tracers(block_ptr % state % time_levs(1) % state % index_temperature,iLevel,iCell) = 10.0 ! temperature
- !tracers(block_ptr % state % time_levs(1) % state % index_salinity,iLevel,iCell) = 1.4 + iLevel*0.6 ! salinity
+ tracers(block_ptr % state % time_levs(1) % state % index_temperature,iLevel,iCell) = 10.0 ! temperature
+ tracers(block_ptr % state % time_levs(1) % state % index_salinity,iLevel,iCell) = 1.4 + iLevel*0.6 ! salinity
- ! tracers(block_ptr % state % time_levs(1) % state % index_tracer1,iLevel,iCell) = 1.0
- ! tracers(block_ptr % state % time_levs(1) % state % index_tracer2,iLevel,iCell) = &
- ! (yCell(iCell)/4000.e3 + xCell(iCell)/2500.e3 )/2.0
+ tracers(block_ptr % state % time_levs(1) % state % index_tracer1,iLevel,iCell) = 1.0
+ tracers(block_ptr % state % time_levs(1) % state % index_tracer2,iLevel,iCell) = &
+ (yCell(iCell)/4000.e3 + xCell(iCell)/2500.e3 )/2.0
!tracers(block_ptr % state % time_levs(1) % state % index_tracer1,iLevel,iCell) = block_ptr % mesh % lonCell % array(iCell)
!tracers(block_ptr % state % time_levs(1) % state % index_tracer2,iLevel,iCell) = block_ptr % mesh % latCell % array(iCell)
@@ -184,6 +185,11 @@
endif
+!mrp temp, to agree with previous test
+!h(1,:) = 500.0
+!h(2,:) = 1250.0
+!h(3,:) = 3250.0
+
! print some diagnostics
print '(10a)', 'ilevel',&
' rho ',&
@@ -211,7 +217,6 @@
block_ptr => block_ptr % next
end do
-
end subroutine setup_sw_test_case
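
As a standalone sketch of the zTop/zMid recurrence that this commit moves out of setup_sw_test_case: each layer's top is the previous top minus the layer thickness, and the mid-depth sits half a thickness below the top. The driver program and print statements below are illustrative only; the three thicknesses are the commented-out test values from the hunk above.

program zlevel_sketch
   implicit none
   integer, parameter :: nVertLevels = 3
   real :: hZLevel(nVertLevels), zMidZLevel(nVertLevels), zTopZLevel(nVertLevels+1)
   integer :: iLevel
   hZLevel = (/ 500.0, 1250.0, 3250.0 /)   ! layer thicknesses, surface down
   zTopZLevel(1) = 0.0                     ! sea surface
   do iLevel = 1, nVertLevels
      zMidZLevel(iLevel)   = zTopZLevel(iLevel) - 0.5*hZLevel(iLevel)
      zTopZLevel(iLevel+1) = zTopZLevel(iLevel) - hZLevel(iLevel)
   end do
   print '(a,4f10.1)', 'zTop: ', zTopZLevel   ! 0.0  -500.0  -1750.0  -5000.0
   print '(a,3f10.1)', 'zMid: ', zMidZLevel   ! -250.0  -1125.0  -3375.0
end program zlevel_sketch
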
Modified: branches/ocean_projects/topography2_mrp/src/core_ocean/mpas_interface.F
===================================================================
--- branches/ocean_projects/topography2_mrp/src/core_ocean/mpas_interface.F        2010-10-19 17:51:22 UTC (rev 569)
+++ branches/ocean_projects/topography2_mrp/src/core_ocean/mpas_interface.F        2010-10-19 21:01:25 UTC (rev 570)
@@ -12,6 +12,154 @@
end subroutine mpas_setup_test_case
+subroutine mpas_init_domain(domain)
+! Initialize grid variables that are computed from the netcdf input file.
+
+ use grid_types
+ use configure
+ use constants
+ use dmpar
+
+ implicit none
+
+ type (domain_type), intent(inout) :: domain
+
+ integer :: i, iCell, iEdge, iVertex, iLevel
+ type (block_type), pointer :: block
+ type (dm_info) :: dminfo
+
+ integer :: iTracer, cell1, cell2, cell, k
+ real (kind=RKIND), dimension(:), pointer :: xCell, yCell, &
+ hZLevel, zMidZLevel, zTopZLevel, latCell, lonCell
+ real (kind=RKIND), dimension(:,:), pointer :: h, u, u_src, rho
+ real (kind=RKIND), dimension(:,:,:), pointer :: tracers
+ real (kind=RKIND) :: delta_rho, pi, latCenter, lonCenter, dist
+ real (kind=RKIND) :: centerx, centery
+ integer :: nCells, nEdges, nVertices, nVertLevels, vertexDegree
+
+ integer, dimension(:), pointer :: &
+ maxLevelCell, maxLevelEdge, maxLevelVertex
+ integer, dimension(:,:), pointer :: &
+ cellsOnEdge, cellsOnVertex, boundaryEdge
+
+ if (config_vert_grid_type.eq.'isopycnal') then
+ print *, ' Using isopycnal coordinates'
+ elseif (config_vert_grid_type.eq.'zlevel') then
+ print *, ' Using z-level coordinates'
+ else
+ print *, ' Incorrect choice of config_vert_grid_type:',&
+ config_vert_grid_type
+ call dmpar_abort(dminfo)
+ endif
+
+
+ ! Initialize z-level grid variables from h, read in from input file.
+ block => domain % blocklist
+ do while (associated(block))
+
+ hZLevel => block % mesh % hZLevel % array
+ zMidZLevel => block % mesh % zMidZLevel % array
+ zTopZLevel => block % mesh % zTopZLevel % array
+ maxLevelCell => block % mesh % maxLevelCell % array
+ maxLevelEdge => block % mesh % maxLevelEdge % array
+ maxLevelVertex => block % mesh % maxLevelVertex % array
+ cellsOnEdge => block % mesh % cellsOnEdge % array
+ cellsOnVertex => block % mesh % cellsOnVertex % array
+ boundaryEdge => block % mesh % boundaryEdge % array
+
+ nCells = block % mesh % nCells
+ nEdges = block % mesh % nEdges
+ nVertices = block % mesh % nVertices
+ nVertLevels = block % mesh % nVertLevels
+ vertexDegree = block % mesh % vertexDegree
+
+ if (config_vert_grid_type.eq.'zlevel') then
+
+ ! hZLevel should be in the grid.nc and restart.nc files.
+
+ zTopZLevel(1) = 0.0
+ do iLevel = 1,nVertLevels
+ zMidZLevel(iLevel) = zTopZLevel(iLevel)-0.5*hZLevel(iLevel)
+ zTopZLevel(iLevel+1) = zTopZLevel(iLevel)- hZLevel(iLevel)
+ enddo
+
+ endif
+
+ ! Isopycnal grid uses all vertical cells
+ if (config_vert_grid_type.eq.'isopycnal') then
+ maxLevelCell = nVertLevels
+ endif
+
+ ! mrp insert topography mesa for testing
+ ! do iCell = 1,nCells
+ ! if (latCell(iCell)> 0.0/180.*3.14.and.&
+ ! latCell(iCell)< 30.0/180.*3.14.and. &
+ ! lonCell(iCell)>180.0/180.*3.14.and. &
+ ! lonCell(iCell)<210.0/180.*3.14) then
+ ! maxLevelCell(iCell) = 10
+ ! endif
+ ! enddo
+
+ ! maxLevelEdge is the minimum of the surrounding cells
+ do iEdge=1,nEdges
+ cell1 = cellsOnEdge(1,iEdge)
+ cell2 = cellsOnEdge(2,iEdge)
+
+ if (cell1 <= nCells .and. cell2 <= nCells) then
+ maxLevelEdge(iEdge) = &
+ min( maxLevelCell(cell1), &
+ maxLevelCell(cell2) )
+ else
+ maxLevelEdge(iEdge) = 0
+ endif
+
+ end do
+
+ ! set boundary edge
+ boundaryEdge=1
+ do iEdge=1,nEdges
+ boundaryEdge(1:maxLevelEdge(iEdge),iEdge)=0
+ end do
+
+ ! maxLevelVertex is the maximum of the surrounding cells
+ do iVertex = 1,nVertices
+ maxLevelVertex(iVertex) = 0
+ do i=1,vertexDegree
+ cell = cellsOnVertex(i,iVertex)
+ if (cell <= nCells) then
+ maxLevelVertex(iVertex) = &
+ max( maxLevelVertex(iVertex), &
+ maxLevelCell(cell) )
+ endif
+ end do
+ end do
+
+ block => block % next
+ end do
+
+ ! update halos for maxLevel variables
+ block => domain % blocklist
+ do while (associated(block))
+ maxLevelCell => block % mesh % maxLevelCell % array
+ maxLevelEdge => block % mesh % maxLevelEdge % array
+ maxLevelVertex => block % mesh % maxLevelVertex % array
+
+ call dmpar_exch_halo_field1dInteger(domain % dminfo, &
+ maxLevelCell, block % mesh % nCells, &
+ block % parinfo % cellsToSend, block % parinfo % cellsToRecv)
+ call dmpar_exch_halo_field1dInteger(domain % dminfo, &
+ maxLevelEdge, block % mesh % nEdges, &
+ block % parinfo % edgesToSend, block % parinfo % edgesToRecv)
+ call dmpar_exch_halo_field1dInteger(domain % dminfo, &
+ maxLevelVertex, block % mesh % nVertices, &
+ block % parinfo % verticesToSend, block % parinfo % verticesToRecv)
+
+ block => block % next
+ end do
+
+end subroutine mpas_init_domain
+
+
subroutine mpas_init(block, mesh, dt)
use grid_types
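
A standalone sketch of the maxLevelEdge / maxLevelVertex derivation in mpas_init_domain: an edge takes the shallower of its two neighboring cells, while a vertex takes the deepest surrounding cell. The mesh here is a toy two-cell example (real MPAS meshes use vertexDegree = 3 and flag absent neighbors with index nCells+1, which the `cell <= nCells` guards in the code above handle; that guard is elided here).

program maxlevel_sketch
   implicit none
   integer, parameter :: nCells = 2, nEdges = 1, nVertices = 1, vertexDegree = 2
   integer :: maxLevelCell(nCells) = (/ 10, 4 /)   ! illustrative depths, in levels
   integer :: cellsOnEdge(2,nEdges), cellsOnVertex(vertexDegree,nVertices)
   integer :: maxLevelEdge(nEdges), maxLevelVertex(nVertices)
   integer :: iEdge, iVertex, i, cell1, cell2, cell

   cellsOnEdge(:,1)   = (/ 1, 2 /)
   cellsOnVertex(:,1) = (/ 1, 2 /)

   ! Edges use the minimum, so no flux is computed below the sea
   ! floor of the shallower neighbor.
   do iEdge = 1, nEdges
      cell1 = cellsOnEdge(1,iEdge)
      cell2 = cellsOnEdge(2,iEdge)
      maxLevelEdge(iEdge) = min(maxLevelCell(cell1), maxLevelCell(cell2))
   end do

   ! Vertices use the maximum over surrounding cells.
   do iVertex = 1, nVertices
      maxLevelVertex(iVertex) = 0
      do i = 1, vertexDegree
         cell = cellsOnVertex(i,iVertex)
         maxLevelVertex(iVertex) = max(maxLevelVertex(iVertex), maxLevelCell(cell))
      end do
   end do

   print *, 'maxLevelEdge   =', maxLevelEdge     ! 4
   print *, 'maxLevelVertex =', maxLevelVertex   ! 10
end program maxlevel_sketch
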
Modified: branches/ocean_projects/topography2_mrp/src/driver/mpas.F
===================================================================
--- branches/ocean_projects/topography2_mrp/src/driver/mpas.F        2010-10-19 17:51:22 UTC (rev 569)
+++ branches/ocean_projects/topography2_mrp/src/driver/mpas.F        2010-10-19 21:01:25 UTC (rev 570)
@@ -32,6 +32,8 @@
call input_state_for_domain(domain)
+ call mpas_init_domain(domain)
+
if (.not. config_do_restart) call mpas_setup_test_case(domain)
call timer_stop("initialize")
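
The resulting startup ordering, sketched from the hunk above: mpas_init_domain has to follow input_state_for_domain because it reads hZLevel and maxLevelCell from the freshly loaded state, and it precedes mpas_setup_test_case so the test cases see completed maxLevel fields.

call input_state_for_domain(domain)    ! read grid.nc / restart.nc
call mpas_init_domain(domain)          ! derive maxLevel*, z-level geometry, exchange halos
if (.not. config_do_restart) call mpas_setup_test_case(domain)
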
Modified: branches/ocean_projects/topography2_mrp/src/framework/module_dmpar.F
===================================================================
--- branches/ocean_projects/topography2_mrp/src/framework/module_dmpar.F        2010-10-19 17:51:22 UTC (rev 569)
+++ branches/ocean_projects/topography2_mrp/src/framework/module_dmpar.F        2010-10-19 21:01:25 UTC (rev 570)
@@ -4,6 +4,7 @@
#ifdef _MPI
include 'mpif.h'
+ integer, parameter :: MPI_INTEGERKIND = MPI_INTEGER
#if (RKIND == 8)
integer, parameter :: MPI_REALKIND = MPI_DOUBLE_PRECISION
@@ -64,7 +65,8 @@
dminfo % nprocs = mpi_size
dminfo % my_proc_id = mpi_rank
- write(0,'(a,i5,a,i5,a)') 'task ', mpi_rank, ' of ', mpi_size, ' is running'
+ write(0,'(a,i5,a,i5,a)') 'task ', mpi_rank, ' of ', mpi_size, &
+ ' is running'
call open_streams(dminfo % my_proc_id)
@@ -781,7 +783,8 @@
#else
if (nOwnedList /= nNeededList) then
- write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, arrayIn and arrayOut dims must match.'
+ write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, ', &
+ 'arrayIn and arrayOut dims must match.'
call dmpar_abort(dminfo)
else
arrayOut(1:nNeededList) = arrayIn(1:nOwnedList)
@@ -873,7 +876,8 @@
#else
if (nOwnedList /= nNeededList) then
- write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, arrayIn and arrayOut dims must match.'
+ write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, ', &
+ 'arrayIn and arrayOut dims must match.'
call dmpar_abort(dminfo)
else
arrayOut(:,1:nNeededList) = arrayIn(:,1:nOwnedList)
@@ -962,7 +966,8 @@
#else
if (nOwnedList /= nNeededList) then
- write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, arrayIn and arrayOut dims must match.'
+ write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, ', &
+ 'arrayIn and arrayOut dims must match.'
call dmpar_abort(dminfo)
else
arrayOut(1:nNeededList) = arrayIn(1:nOwnedList)
@@ -1054,7 +1059,8 @@
#else
if (nOwnedList /= nNeededList) then
- write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, arrayIn and arrayOut dims must match.'
+ write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, ', &
+ 'arrayIn and arrayOut dims must match.'
call dmpar_abort(dminfo)
else
arrayOut(:,1:nNeededList) = arrayIn(:,1:nOwnedList)
@@ -1146,7 +1152,8 @@
#else
if (nOwnedList /= nNeededList) then
- write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, arrayIn and arrayOut dims must match.'
+ write(0,*) 'Error in dmpar_alltoall_field: For non-dmpar, ', &
+ 'arrayIn and arrayOut dims must match.'
call dmpar_abort(dminfo)
else
arrayOut(:,:,1:nNeededList) = arrayIn(:,:,1:nOwnedList)
@@ -1198,7 +1205,8 @@
n = de-ds+1
if (n > nBuffer) then
- write(0,*) 'packSendBuf2dInteger: Not enough space in buffer to fit a single slice.'
+ write(0,*) 'packSendBuf2dInteger: Not enough space in buffer', &
+ ' to fit a single slice.'
return
end if
@@ -1217,6 +1225,45 @@
end subroutine packSendBuf2dInteger
+ subroutine packSendBuf3dInteger(d1s, d1e, d2s, d2e, nField, field, sendList, startPackIdx, nBuffer, buffer, nPacked, lastPackedIdx)
+
+ implicit none
+
+ integer, intent(in) :: d1s, d1e, d2s, d2e, nField, nBuffer, startPackIdx
+ integer, dimension(d1s:d1e,d2s:d2e,*), intent(in) :: field
+ type (exchange_list), intent(in) :: sendList
+ integer, dimension(nBuffer), intent(out) :: buffer
+ integer, intent(inout) :: nPacked, lastPackedIdx
+
+ integer :: i, j, k, n
+
+ n = (d1e-d1s+1) * (d2e-d2s+1)
+
+ if (n > nBuffer) then
+ write(0,*) 'packSendBuf3dInteger: Not enough space in buffer', &
+ ' to fit a single slice.'
+ return
+ end if
+
+ nPacked = 0
+ do i=startPackIdx, sendList % nlist
+ nPacked = nPacked + n
+ if (nPacked > nBuffer) then
+ nPacked = nPacked - n
+ lastPackedIdx = i - 1
+ return
+ end if
+ k = nPacked-n+1
+ do j=d2s,d2e
+ buffer(k:k+d1e-d1s) = field(d1s:d1e,j,sendList % list(i))
+ k = k + d1e-d1s+1
+ end do
+ end do
+ lastPackedIdx = sendList % nlist
+
+ end subroutine packSendBuf3dInteger
+
+
subroutine packSendBuf1dReal(nField, field, sendList, startPackIdx, nBuffer, buffer, nPacked, lastPackedIdx)
implicit none
@@ -1259,7 +1306,8 @@
n = de-ds+1
if (n > nBuffer) then
- write(0,*) 'packSendBuf2dReal: Not enough space in buffer to fit a single slice.'
+ write(0,*) 'packSendBuf2dReal: Not enough space in buffer', &
+ ' to fit a single slice.'
return
end if
@@ -1293,7 +1341,8 @@
n = (d1e-d1s+1) * (d2e-d2s+1)
if (n > nBuffer) then
- write(0,*) 'packSendBuf2dReal: Not enough space in buffer to fit a single slice.'
+ write(0,*) 'packSendBuf3dReal: Not enough space in buffer', &
+ ' to fit a single slice.'
return
end if
@@ -1372,6 +1421,40 @@
end subroutine unpackRecvBuf2dInteger
+ subroutine unpackRecvBuf3dInteger(d1s, d1e, d2s, d2e, nField, field, recvList, startUnpackIdx, nBuffer, buffer, &
+ nUnpacked, lastUnpackedIdx)
+
+ implicit none
+
+ integer, intent(in) :: d1s, d1e, d2s, d2e, nField, nBuffer, startUnpackIdx
+ integer, dimension(d1s:d1e,d2s:d2e,*), intent(inout) :: field
+ type (exchange_list), intent(in) :: recvList
+ integer, dimension(nBuffer), intent(in) :: buffer
+ integer, intent(inout) :: nUnpacked, lastUnpackedIdx
+
+ integer :: i, j, k, n
+
+ n = (d1e-d1s+1) * (d2e-d2s+1)
+
+ nUnpacked = 0
+ do i=startUnpackIdx, recvList % nlist
+ nUnpacked = nUnpacked + n
+ if (nUnpacked > nBuffer) then
+ nUnpacked = nUnpacked - n
+ lastUnpackedIdx = i - 1
+ return
+ end if
+ k = nUnpacked-n+1
+ do j=d2s,d2e
+ field(d1s:d1e,j,recvList % list(i)) = buffer(k:k+d1e-d1s)
+ k = k + d1e-d1s+1
+ end do
+ end do
+ lastUnpackedIdx = recvList % nlist
+
+ end subroutine unpackRecvBuf3dInteger
+
+
subroutine unpackRecvBuf1dReal(nField, field, recvList, startUnpackIdx, nBuffer, buffer, nUnpacked, lastUnpackedIdx)
implicit none
@@ -1462,6 +1545,196 @@
end subroutine unpackRecvBuf3dReal
+ subroutine dmpar_exch_halo_field1dInteger(dminfo, array, dim1, sendList, recvList)
+
+ implicit none
+
+ type (dm_info), intent(in) :: dminfo
+ integer, intent(in) :: dim1
+ integer, dimension(*), intent(inout) :: array
+ type (exchange_list), pointer :: sendList, recvList
+
+ type (exchange_list), pointer :: sendListPtr, recvListPtr
+ integer :: lastPackedIdx, lastUnpackedIdx, nPacked, nUnpacked
+ integer :: mpi_ierr
+
+#ifdef _MPI
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ allocate(recvListPtr % ibuffer(recvListPtr % nlist))
+ call MPI_Irecv(recvListPtr % ibuffer, recvListPtr % nlist, MPI_INTEGERKIND, &
+ recvListPtr % procID, recvListPtr % procID, dminfo % comm, recvListPtr % reqID, mpi_ierr)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ allocate(sendListPtr % ibuffer(sendListPtr % nlist))
+ call packSendBuf1dInteger(dim1, array, sendListPtr, 1, sendListPtr % nlist, sendListPtr % ibuffer, nPacked, lastPackedIdx)
+ call MPI_Isend(sendListPtr % ibuffer, sendListPtr % nlist, MPI_INTEGERKIND, &
+ sendListPtr % procID, dminfo % my_proc_id, dminfo % comm, sendListPtr % reqID, mpi_ierr)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(recvListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ call unpackRecvBuf1dInteger(dim1, array, recvListPtr, 1, recvListPtr % nlist, recvListPtr % ibuffer, nUnpacked, lastUnpackedIdx)
+ deallocate(recvListPtr % ibuffer)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(sendListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ deallocate(sendListPtr % ibuffer)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+#endif
+
+ end subroutine dmpar_exch_halo_field1dInteger
+
+
+ subroutine dmpar_exch_halo_field2dInteger(dminfo, array, dim1, dim2, sendList, recvList)
+
+ implicit none
+
+ type (dm_info), intent(in) :: dminfo
+ integer, intent(in) :: dim1, dim2
+ integer, dimension(dim1,*), intent(inout) :: array
+ type (exchange_list), pointer :: sendList, recvList
+
+ type (exchange_list), pointer :: sendListPtr, recvListPtr
+ integer :: lastPackedIdx, lastUnpackedIdx, nPacked, nUnpacked
+ integer :: mpi_ierr
+ integer :: d2
+
+#ifdef _MPI
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ d2 = dim1 * recvListPtr % nlist
+ allocate(recvListPtr % ibuffer(d2))
+ call MPI_Irecv(recvListPtr % ibuffer, d2, MPI_INTEGERKIND, &
+ recvListPtr % procID, recvListPtr % procID, dminfo % comm, recvListPtr % reqID, mpi_ierr)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ d2 = dim1 * sendListPtr % nlist
+ allocate(sendListPtr % ibuffer(d2))
+ call packSendBuf2dInteger(1, dim1, dim2, array, sendListPtr, 1, d2, sendListPtr % ibuffer, nPacked, lastPackedIdx)
+ call MPI_Isend(sendListPtr % ibuffer, d2, MPI_INTEGERKIND, &
+ sendListPtr % procID, dminfo % my_proc_id, dminfo % comm, sendListPtr % reqID, mpi_ierr)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(recvListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ d2 = dim1 * recvListPtr % nlist
+ call unpackRecvBuf2dInteger(1, dim1, dim2, array, recvListPtr, 1, d2, recvListPtr % ibuffer, nUnpacked, lastUnpackedIdx)
+ deallocate(recvListPtr % ibuffer)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(sendListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ deallocate(sendListPtr % ibuffer)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+#endif
+
+ end subroutine dmpar_exch_halo_field2dInteger
+
+
+ subroutine dmpar_exch_halo_field3dInteger(dminfo, array, dim1, dim2, dim3, sendList, recvList)
+
+ implicit none
+
+ type (dm_info), intent(in) :: dminfo
+ integer, intent(in) :: dim1, dim2, dim3
+ integer, dimension(dim1,dim2,*), intent(inout) :: array
+ type (exchange_list), pointer :: sendList, recvList
+
+ type (exchange_list), pointer :: sendListPtr, recvListPtr
+ integer :: lastPackedIdx, lastUnpackedIdx, nPacked, nUnpacked
+ integer :: mpi_ierr
+ integer :: d3
+
+#ifdef _MPI
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ d3 = dim1 * dim2 * recvListPtr % nlist
+ allocate(recvListPtr % ibuffer(d3))
+ call MPI_Irecv(recvListPtr % ibuffer, d3, MPI_INTEGERKIND, &
+ recvListPtr % procID, recvListPtr % procID, dminfo % comm, recvListPtr % reqID, mpi_ierr)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ d3 = dim1 * dim2 * sendListPtr % nlist
+ allocate(sendListPtr % ibuffer(d3))
+ call packSendBuf3dInteger(1, dim1, 1, dim2, dim3, array, sendListPtr, 1, d3, &
+ sendListPtr % ibuffer, nPacked, lastPackedIdx)
+ call MPI_Isend(sendListPtr % ibuffer, d3, MPI_INTEGERKIND, &
+ sendListPtr % procID, dminfo % my_proc_id, dminfo % comm, sendListPtr % reqID, mpi_ierr)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+ recvListPtr => recvList
+ do while (associated(recvListPtr))
+ if (recvListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(recvListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ d3 = dim1 * dim2 * recvListPtr % nlist
+ call unpackRecvBuf3dInteger(1, dim1, 1, dim2, dim3, array, recvListPtr, 1, d3, &
+ recvListPtr % ibuffer, nUnpacked, lastUnpackedIdx)
+ deallocate(recvListPtr % ibuffer)
+ end if
+ recvListPtr => recvListPtr % next
+ end do
+
+ sendListPtr => sendList
+ do while (associated(sendListPtr))
+ if (sendListPtr % procID /= dminfo % my_proc_id) then
+ call MPI_Wait(sendListPtr % reqID, MPI_STATUS_IGNORE, mpi_ierr)
+ deallocate(sendListPtr % ibuffer)
+ end if
+ sendListPtr => sendListPtr % next
+ end do
+
+#endif
+
+ end subroutine dmpar_exch_halo_field3dInteger
+
+
subroutine dmpar_exch_halo_field1dReal(dminfo, array, dim1, sendList, recvList)
implicit none
@@ -1651,5 +1924,5 @@
end subroutine dmpar_exch_halo_field3dReal
-
+
end module dmpar
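
Two notes on the new integer exchange routines above. First, the nonblocking pattern mirrors the existing real-valued routines: receives are posted first with MPI_Irecv, buffers are then packed and sent with MPI_Isend, each receive is waited on and unpacked, and finally the sends are waited on before their buffers are freed. Second, the flat-buffer layout used by packSendBuf3dInteger / unpackRecvBuf3dInteger gives each exchange-list element one contiguous slab of n = (d1e-d1s+1)*(d2e-d2s+1) integers, filled column by column over the second dimension. A minimal standalone round-trip sketch of that index arithmetic (the program, array sizes, and `list` are illustrative, not framework code):

program pack_sketch
   implicit none
   integer, parameter :: d1 = 2, d2 = 3, nElems = 2
   integer :: field(d1,d2,nElems), copy(d1,d2,nElems)
   integer :: buffer(d1*d2*nElems)
   integer :: list(nElems) = (/ 2, 1 /)   ! halo elements in exchange order
   integer :: i, j, k, n

   field = reshape((/ (i, i = 1, d1*d2*nElems) /), shape(field))
   n = d1*d2

   ! Pack: element list(i) occupies buffer slab (i-1)*n+1 : i*n,
   ! column by column over the second dimension.
   do i = 1, nElems
      k = (i-1)*n + 1
      do j = 1, d2
         buffer(k:k+d1-1) = field(:,j,list(i))
         k = k + d1
      end do
   end do

   ! Unpack into a second array using the same list; the round trip is exact.
   do i = 1, nElems
      k = (i-1)*n + 1
      do j = 1, d2
         copy(:,j,list(i)) = buffer(k:k+d1-1)
         k = k + d1
      end do
   end do

   print *, 'round trip ok: ', all(copy == field)
end program pack_sketch
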
</font>
</pre>