<p><b>duda</b> 2009-12-15 16:35:01 -0700 (Tue, 15 Dec 2009)</p><p>Update and add comments to input module to clarify the<br>
process of reading fields, re-distributing fields, and<br>
determining halo regions for blocks.<br>
<br>
M swmodel/src/module_io_input.F<br>
</p><hr noshade><pre><font color="gray">Modified: trunk/swmodel/src/module_io_input.F
===================================================================
--- trunk/swmodel/src/module_io_input.F        2009-12-09 22:13:04 UTC (rev 81)
+++ trunk/swmodel/src/module_io_input.F        2009-12-15 23:35:01 UTC (rev 82)
@@ -103,8 +103,10 @@
!
#include "read_dims.inc"
+ !
! Determine the range of cells/edges/vertices that a processor will initially read
! from the input file
+ !
call dmpar_get_index_range(domain % dminfo, 1, nCells, readCellStart, readCellEnd)
nReadCells = readCellEnd - readCellStart + 1
@@ -120,9 +122,11 @@
!
- ! Allocate fields that we will need in order to ultimately work out which
- ! cells/edges/vertices are owned by each block, and which are ghost
+ ! Allocate and read fields that we will need in order to ultimately work out
+ ! which cells/edges/vertices are owned by each block, and which are ghost
!
+
+ ! Global cell indices
allocate(indexToCellIDField % ioinfo)
indexToCellIDField % ioinfo % fieldName = 'indexToCellID'
indexToCellIDField % ioinfo % start(1) = readCellStart
@@ -130,6 +134,7 @@
allocate(indexToCellIDField % array(nReadCells))
call io_input_field(input_obj, indexToCellIDField)
+ ! Global edge indices
allocate(indexToEdgeIDField % ioinfo)
indexToEdgeIDField % ioinfo % fieldName = 'indexToEdgeID'
indexToEdgeIDField % ioinfo % start(1) = readEdgeStart
@@ -137,6 +142,7 @@
allocate(indexToEdgeIDField % array(nReadEdges))
call io_input_field(input_obj, indexToEdgeIDField)
+ ! Global vertex indices
allocate(indexToVertexIDField % ioinfo)
indexToVertexIDField % ioinfo % fieldName = 'indexToVertexID'
indexToVertexIDField % ioinfo % start(1) = readVertexStart
@@ -144,6 +150,7 @@
allocate(indexToVertexIDField % array(nReadVertices))
call io_input_field(input_obj, indexToVertexIDField)
+ ! Number of cells/edges/vertices adjacent to each cell
allocate(nEdgesOnCellField % ioinfo)
nEdgesOnCellField % ioinfo % fieldName = 'nEdgesOnCell'
nEdgesOnCellField % ioinfo % start(1) = readCellStart
@@ -151,6 +158,7 @@
allocate(nEdgesOnCellField % array(nReadCells))
call io_input_field(input_obj, nEdgesOnCellField)
+ ! Global indices of cells adjacent to each cell
allocate(cellsOnCellField % ioinfo)
cellsOnCellField % ioinfo % fieldName = 'cellsOnCell'
cellsOnCellField % ioinfo % start(1) = 1
@@ -160,6 +168,7 @@
allocate(cellsOnCellField % array(maxEdges,nReadCells))
call io_input_field(input_obj, cellsOnCellField)
+ ! Global indices of edges adjacent to each cell
allocate(edgesOnCellField % ioinfo)
edgesOnCellField % ioinfo % fieldName = 'edgesOnCell'
edgesOnCellField % ioinfo % start(1) = 1
@@ -169,6 +178,7 @@
allocate(edgesOnCellField % array(maxEdges,nReadCells))
call io_input_field(input_obj, edgesOnCellField)
+ ! Global indices of vertices adjacent to each cell
allocate(verticesOnCellField % ioinfo)
verticesOnCellField % ioinfo % fieldName = 'verticesOnCell'
verticesOnCellField % ioinfo % start(1) = 1
@@ -178,6 +188,9 @@
allocate(verticesOnCellField % array(maxEdges,nReadCells))
call io_input_field(input_obj, verticesOnCellField)
+ ! Global indices of cells adjacent to each edge
+ ! used for determining which edges are owned by a block, where
+ ! iEdge is owned iff cellsOnEdge(1,iEdge) is an owned cell
allocate(cellsOnEdgeField % ioinfo)
cellsOnEdgeField % ioinfo % fieldName = 'cellsOnEdge'
cellsOnEdgeField % ioinfo % start(1) = 1
@@ -187,6 +200,9 @@
allocate(cellsOnEdgeField % array(2,nReadEdges))
call io_input_field(input_obj, cellsOnEdgeField)
+ ! Global indices of cells adjacent to each vertex
+ ! used for determining which vertices are owned by a block, where
+ ! iVtx is owned iff cellsOnVertex(1,iVtx) is an owned cell
allocate(cellsOnVertexField % ioinfo)
cellsOnVertexField % ioinfo % fieldName = 'cellsOnVertex'
cellsOnVertexField % ioinfo % start(1) = 1
@@ -198,8 +214,11 @@
!
- ! Set up a graph describing the connectivity for the cells that were read by
- ! this process
+ ! Set up a graph derived data type describing the connectivity for the cells
+ ! that were read by this process
+ ! A partial description is passed to the block decomp module by each process,
+ ! and the block decomp module returns with a list of global cell indices
+ ! that belong to the block on this process
!
partial_global_graph_info % nVertices = nReadCells
partial_global_graph_info % nVerticesTotal = nCells
@@ -215,10 +234,13 @@
! TODO: Ensure (by renaming or exchanging) that initial cell range on each proc is contiguous
+ ! This situation may occur when reading a restart file with cells/edges/vertices written
+ ! in a scrambled order
+
! Determine which cells are owned by this process
call block_decomp_cells_for_proc(domain % dminfo, partial_global_graph_info, local_cell_list)
-
+
deallocate(partial_global_graph_info % vertexID)
deallocate(partial_global_graph_info % nAdjacent)
deallocate(partial_global_graph_info % adjacencyList)
@@ -272,6 +294,7 @@
block_graph_0Halo % nAdjacent(:) = nEdgesOnCell_0Halo(:)
block_graph_0Halo % adjacencyList(:,:) = cellsOnCell_0Halo(:,:)
+ ! Get back a graph describing the owned cells plus the cells in the 1-halo
call block_decomp_add_halo(domain % dminfo, block_graph_0Halo, block_graph_1Halo)
@@ -302,6 +325,7 @@
block_graph_1Halo % nVertices = block_graph_1Halo % nVerticesTotal
block_graph_1Halo % ghostStart = block_graph_1Halo % nVerticesTotal + 1
+ ! Get back a graph describing the owned and 1-halo cells plus the cells in the 2-halo
call block_decomp_add_halo(domain % dminfo, block_graph_1Halo, block_graph_2Halo)
block_graph_2Halo % nVertices = block_graph_0Halo % nVertices
@@ -329,8 +353,8 @@
!
! Knowing which cells are in block and the 2-halo, we can exchange lists of which edges are
- ! on each cell and which vertices are on each cell between the processes that read these
- ! fields for each cell and the processes that own the cells
+ ! on each cell and which vertices are on each cell from the processes that read these
+ ! fields for each cell to the processes that own the cells
!
allocate(edgesOnCell_2Halo(maxEdges, block_graph_2Halo % nVerticesTotal))
allocate(verticesOnCell_2Halo(maxEdges, block_graph_2Halo % nVerticesTotal))
@@ -350,7 +374,7 @@
!
- ! Get a list of which edges and vertices are adjacent to cells in block
+ ! Get a list of which edges and vertices are adjacent to cells (including halo cells) in block
!
call block_decomp_all_edges_in_block(maxEdges, block_graph_2Halo % nVerticesTotal, block_graph_2Halo % nAdjacent, &
edgesOnCell_2Halo, nlocal_edges, local_edge_list)
@@ -391,7 +415,18 @@
block_graph_2Halo % vertexID(1:block_graph_2Halo % nVertices), &
3, nlocal_vertices, cellsOnVertex_2Halo, local_vertex_list, ghostVertexStart)
-
+
+ ! At this point, local_edge_list(1:ghostEdgeStart-1) contains all of the owned edges for this block
+ ! and local_edge_list(ghostEdgeStart:nlocal_edges) contains all of the ghost edges
+
+ ! At this point, local_vertex_list(1:ghostVertexStart-1) contains all of the owned vertices for this block
+ ! and local_vertex_list(ghostVertexStart:nlocal_vertices) contains all of the ghost vertices
+
+ ! Also, at this point, block_graph_2Halo % vertexID(1:block_graph_2Halo%nVertices) contains all of the owned
+ ! cells for this block, and block_graph_2Halo % vertexID(block_graph_2Halo%nVertices+1:block_graph_2Halo%nVerticesTotal)
+ ! contains all of the ghost cells
+
+
deallocate(sendEdgeList % list)
deallocate(sendEdgeList)
deallocate(recvEdgeList % list)
@@ -402,6 +437,13 @@
deallocate(recvVertexList % list)
deallocate(recvVertexList)
+
+ !
+ ! Knowing which edges/vertices are owned by this block and which are actually read
+ ! from the input or restart file, we can build exchange lists to perform
+ ! all-to-all field exchanges from the process that reads a field to the processes that
+ ! need them
+ !
call dmpar_get_owner_list(domain % dminfo, &
size(indexToEdgeIDField % array), nlocal_edges, &
indexToEdgeIDField % array, local_edge_list, &
@@ -412,6 +454,11 @@
indexToVertexIDField % array, local_vertex_list, &
sendVertexList, recvVertexList)
+ !
+ ! Build ownership and exchange lists for vertical levels
+ ! Essentially, process 0 owns all vertical levels when reading and writing,
+ ! and it distributes them or gathers them to/from all other processes
+ !
if (domain % dminfo % my_proc_id == 0) then
allocate(local_vertlevel_list(nVertLevels))
do i=1,nVertLevels
@@ -452,6 +499,10 @@
else
input_obj % time = 1
+ !
+ ! If doing a restart, we need to decide which time slice to read from the
+ ! restart.nc file
+ !
if (input_obj % rdLocalTime <= 0) then
write(0,*) 'Error: Couldn''t find any times in restart file.'
call dmpar_abort(domain % dminfo)
@@ -486,12 +537,24 @@
end if
+
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
+ ! Do the actual work of reading all fields in from the input or restart file
+ ! For each field:
+ ! 1) Each process reads a contiguous range of cell/edge/vertex indices, which
+ ! may not correspond with the cells/edges/vertices that are owned by the
+ ! process
+ ! 2) All processes then send the global indices that were read to the
+ ! processes that own those indices based on
+ ! {send,recv}{Cell,Edge,Vertex,VertLevel}List
+ !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
call read_and_distribute_fields(domain % dminfo, input_obj, domain % blocklist, &
readCellStart, nReadCells, readEdgeStart, nReadEdges, readVertexStart, nReadVertices, &
readVertLevelStart, nReadVertLevels, &
sendCellList, recvCellList, sendEdgeList, recvEdgeList, sendVertexList, recvVertexList, &
sendVertLevelList, recvVertLevelList)
+
call io_input_finalize(input_obj, domain % dminfo)
</font>
</pre>