new mapDistribute constructor; various fixes

This commit is contained in:
mattijs 2009-10-22 11:59:25 +01:00
parent 10dd3ea2a4
commit fe45863a11
13 changed files with 495 additions and 246 deletions

View File

@ -27,7 +27,7 @@ License
#include "mapDistribute.H"
#include "commSchedule.H"
#include "HashSet.H"
#include "ListOps.H"
#include "globalIndex.H"
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
@ -257,6 +257,292 @@ Foam::mapDistribute::mapDistribute
}
// Construct from a flat list of (possibly remote) elements in
// globalIndex numbering. Determines the compact numbering (all local
// elements first, then the used-only remote elements per processor),
// fills constructSize_/subMap_/constructMap_, and renumbers the
// 'elements' argument in place into compact indices.
// compactMap[procI] is filled with remote-local-index -> compact-index.
Foam::mapDistribute::mapDistribute
(
const globalIndex& globalNumbering,
labelList& elements,
List<Map<label> >& compactMap
)
:
constructSize_(0),
schedulePtr_()
{
// 1. Construct per processor compact addressing of the global elements
// needed. The ones from the local processor are not included since
// these are always all needed.
compactMap.setSize(Pstream::nProcs());
{
// Count all (non-local) elements needed. Just for presizing map.
labelList nNonLocal(Pstream::nProcs(), 0);
forAll(elements, i)
{
label globalIndex = elements[i];
if (!globalNumbering.isLocal(globalIndex))
{
label procI = globalNumbering.whichProcID(globalIndex);
nNonLocal[procI]++;
}
}
forAll(compactMap, procI)
{
if (procI != Pstream::myProcNo())
{
// Oversize (2x) to limit hash collisions; elements may repeat
compactMap[procI].resize(2*nNonLocal[procI]);
}
}
// Collect all (non-local) elements needed.
forAll(elements, i)
{
label globalIndex = elements[i];
if (!globalNumbering.isLocal(globalIndex))
{
label procI = globalNumbering.whichProcID(globalIndex);
// Key is the index local to the owning processor procI
label index = globalNumbering.toLocal(procI, globalIndex);
// Next free compact slot; insert leaves existing entries
// untouched so repeated elements keep their first index
label nCompact = compactMap[procI].size();
compactMap[procI].insert(index, nCompact);
}
}
//// Sort remote elements needed (not really necessary)
//forAll(compactMap, procI)
//{
// if (procI != Pstream::myProcNo())
// {
// Map<label>& globalMap = compactMap[procI];
//
// SortableList<label> sorted(globalMap.toc().xfer());
//
// forAll(sorted, i)
// {
// Map<label>::iterator iter = globalMap.find(sorted[i]);
// iter() = i;
// }
// }
// }
//}
}
// 2. The overall compact addressing is
// - myProcNo data first (uncompacted)
// - all other processors consecutively
labelList compactStart(Pstream::nProcs());
compactStart[Pstream::myProcNo()] = 0;
// Local elements occupy [0, localSize) of the compact table
constructSize_ = globalNumbering.localSize();
forAll(compactStart, procI)
{
if (procI != Pstream::myProcNo())
{
compactStart[procI] = constructSize_;
constructSize_ += compactMap[procI].size();
}
}
// 3. Find out what to receive/send in compact addressing.
// What I want to receive is what others have to send
labelListList wantedRemoteElements(Pstream::nProcs());
// Compact addressing for received data
constructMap_.setSize(Pstream::nProcs());
forAll(compactMap, procI)
{
if (procI == Pstream::myProcNo())
{
// All my own elements are used
label nLocal = globalNumbering.localSize();
wantedRemoteElements[procI] = identity(nLocal);
constructMap_[procI] = identity(nLocal);
}
else
{
// Remote elements wanted from processor procI
labelList& remoteElem = wantedRemoteElements[procI];
labelList& localElem = constructMap_[procI];
remoteElem.setSize(compactMap[procI].size());
localElem.setSize(compactMap[procI].size());
label i = 0;
forAllIter(Map<label>, compactMap[procI], iter)
{
remoteElem[i] = iter.key();
// Offset per-processor compact index to global compact index
label compactI = compactStart[procI]+iter();
localElem[i] = compactI;
// Store the final compact index back for use by renumber()
iter() = compactI;
i++;
}
}
}
// Exchange wanted lists: what arrives is what others want from me,
// i.e. what I have to send (subMap_)
subMap_.setSize(Pstream::nProcs());
exchange(wantedRemoteElements, subMap_);
// Renumber elements
forAll(elements, i)
{
elements[i] = renumber(globalNumbering, compactMap, elements[i]);
}
}
// Variant of the compact-numbering constructor for data sorted into
// bins per local index (e.g. cellCells where cellCells[localCellI]
// is a list of global cells). Same algorithm as the flat-list
// constructor; only the iteration over the input differs.
Foam::mapDistribute::mapDistribute
(
const globalIndex& globalNumbering,
labelListList& cellCells,
List<Map<label> >& compactMap
)
:
constructSize_(0),
schedulePtr_()
{
// 1. Construct per processor compact addressing of the global elements
// needed. The ones from the local processor are not included since
// these are always all needed.
compactMap.setSize(Pstream::nProcs());
{
// Count all (non-local) elements needed. Just for presizing map.
labelList nNonLocal(Pstream::nProcs(), 0);
forAll(cellCells, cellI)
{
const labelList& cCells = cellCells[cellI];
forAll(cCells, i)
{
label globalIndex = cCells[i];
if (!globalNumbering.isLocal(globalIndex))
{
label procI = globalNumbering.whichProcID(globalIndex);
nNonLocal[procI]++;
}
}
}
forAll(compactMap, procI)
{
if (procI != Pstream::myProcNo())
{
// Oversize (2x) to limit hash collisions; elements may repeat
compactMap[procI].resize(2*nNonLocal[procI]);
}
}
// Collect all (non-local) elements needed.
forAll(cellCells, cellI)
{
const labelList& cCells = cellCells[cellI];
forAll(cCells, i)
{
label globalIndex = cCells[i];
if (!globalNumbering.isLocal(globalIndex))
{
label procI = globalNumbering.whichProcID(globalIndex);
// Key is the index local to the owning processor procI
label index = globalNumbering.toLocal(procI, globalIndex);
// Next free compact slot; repeats keep their first index
label nCompact = compactMap[procI].size();
compactMap[procI].insert(index, nCompact);
}
}
}
//// Sort remote elements needed (not really necessary)
//forAll(compactMap, procI)
//{
// if (procI != Pstream::myProcNo())
// {
// Map<label>& globalMap = compactMap[procI];
//
// SortableList<label> sorted(globalMap.toc().xfer());
//
// forAll(sorted, i)
// {
// Map<label>::iterator iter = globalMap.find(sorted[i]);
// iter() = i;
// }
// }
// }
//}
}
// 2. The overall compact addressing is
// - myProcNo data first (uncompacted)
// - all other processors consecutively
labelList compactStart(Pstream::nProcs());
compactStart[Pstream::myProcNo()] = 0;
// Local elements occupy [0, localSize) of the compact table
constructSize_ = globalNumbering.localSize();
forAll(compactStart, procI)
{
if (procI != Pstream::myProcNo())
{
compactStart[procI] = constructSize_;
constructSize_ += compactMap[procI].size();
}
}
// 3. Find out what to receive/send in compact addressing.
// What I want to receive is what others have to send
labelListList wantedRemoteElements(Pstream::nProcs());
// Compact addressing for received data
constructMap_.setSize(Pstream::nProcs());
forAll(compactMap, procI)
{
if (procI == Pstream::myProcNo())
{
// All my own elements are used
label nLocal = globalNumbering.localSize();
wantedRemoteElements[procI] = identity(nLocal);
constructMap_[procI] = identity(nLocal);
}
else
{
// Remote elements wanted from processor procI
labelList& remoteElem = wantedRemoteElements[procI];
labelList& localElem = constructMap_[procI];
remoteElem.setSize(compactMap[procI].size());
localElem.setSize(compactMap[procI].size());
label i = 0;
forAllIter(Map<label>, compactMap[procI], iter)
{
remoteElem[i] = iter.key();
// Offset per-processor compact index to global compact index
label compactI = compactStart[procI]+iter();
localElem[i] = compactI;
// Store the final compact index back for use by renumber()
iter() = compactI;
i++;
}
}
}
// Exchange wanted lists: what arrives is what others want from me,
// i.e. what I have to send (subMap_)
subMap_.setSize(Pstream::nProcs());
exchange(wantedRemoteElements, subMap_);
// Renumber elements
forAll(cellCells, cellI)
{
labelList& cCells = cellCells[cellI];
forAll(cCells, i)
{
cCells[i] = renumber(globalNumbering, compactMap, cCells[i]);
}
}
}
Foam::mapDistribute::mapDistribute(const mapDistribute& map)
:
constructSize_(map.constructSize_),
@ -266,7 +552,27 @@ Foam::mapDistribute::mapDistribute(const mapDistribute& map)
{}
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
// * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * * //
// Renumber a single element from globalIndex numbering into compact
// numbering: local elements map straight to their local index; remote
// elements are looked up in the per-processor compactMap (keyed by the
// index local to the owning processor).
Foam::label Foam::mapDistribute::renumber
(
const globalIndex& globalNumbering,
const List<Map<label> >& compactMap,
const label globalI
)
{
// Remote element: look up its compact position
if (!globalNumbering.isLocal(globalI))
{
const label ownerProcI = globalNumbering.whichProcID(globalI);
const label remoteLocalI = globalNumbering.toLocal(ownerProcI, globalI);
return compactMap[ownerProcI][remoteLocalI];
}
// Local element: compact index is just the local index
return globalNumbering.toLocal(globalI);
}
void Foam::mapDistribute::compact(const boolList& elemIsUsed)
{

View File

@ -39,6 +39,16 @@ Note:
Note2: number of items send on one processor have to equal the number
of items received on the other processor.
Constructors using compact numbering: all my own elements first
(whether used or not) followed by used-only remote elements.
So e.g. 4 procs and on proc 1 the compact
table will first have all globalIndex.localSize() elements from proc1
followed by used-only elements of proc0, proc2, proc3.
The constructed mapDistribute sends the local elements from and
receives the remote elements into their compact position.
compactMap[procI] is the position of elements from procI in the compact
map. compactMap[myProcNo()] is empty since trivial addressing. The indices
into compactMap[procI] are local, not global, indices.
SourceFiles
mapDistribute.C
@ -52,6 +62,7 @@ SourceFiles
#include "labelPair.H"
#include "Pstream.H"
#include "boolList.H"
#include "Map.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@ -59,6 +70,7 @@ namespace Foam
{
class mapPolyMesh;
class globalIndex;
/*---------------------------------------------------------------------------*\
Class mapDistribute Declaration
@ -81,6 +93,15 @@ class mapDistribute
mutable autoPtr<List<labelPair> > schedulePtr_;
//- Exchange data. sendBuf[procI] : data to send to processor procI
// To be moved into Pstream.
template<class T>
static void exchange
(
const List<List<T> >& sendBuf,
List<List<T> >& recvBuf
);
public:
// Public classes
@ -123,6 +144,27 @@ public:
const labelList& recvProcs
);
//- Construct from list of (possibly) remote elements in globalIndex
// numbering. Determines compact numbering (see above) and
// distribute map to get data into this ordering and renumbers the
// elements to be in compact numbering.
mapDistribute
(
const globalIndex&,
labelList& elements,
List<Map<label> >& compactMap
);
//- Special variant that works with the data sorted into bins
// according to local indices. E.g. think cellCells where
// cellCells[localCellI] is a list of global cells
mapDistribute
(
const globalIndex&,
labelListList& cellCells,
List<Map<label> >& compactMap
);
//- Construct copy
mapDistribute(const mapDistribute&);
@ -180,6 +222,15 @@ public:
// Other
//- Helper for construct from globalIndex. Renumbers element
// (in globalIndex numbering) into compact indices.
static label renumber
(
const globalIndex&,
const List<Map<label> >& compactMap,
const label globalElement
);
//- Compact maps. Gets per field a bool whether it is used (locally)
// and works out itself what this side and sender side can remove
// from maps.

View File

@ -886,4 +886,78 @@ void Foam::mapDistribute::distribute
}
// Exchange buffers between all processors: sendBuf[procI] is sent to
// processor procI, recvBuf[procI] receives the data sent by procI.
// Only valid for contiguous (bitwise-copyable) types since raw bytes
// are transferred. Sizes are first made known to all processors via
// combineReduce, then all transfers use non-blocking reads/writes
// followed by a single wait.
template<class T>
void Foam::mapDistribute::exchange
(
const List<List<T> >& sendBuf,
List<List<T> >& recvBuf
)
{
// Raw byte transfer below requires a contiguous type
if (!contiguous<T>())
{
FatalErrorIn("mapDistribute::exchange(..)")
<< "Not contiguous" << exit(FatalError);
}
if (Pstream::parRun())
{
// Determine sizes
// ~~~~~~~~~~~~~~~
// allNTrans[procI][procJ] : number of elements procI sends to procJ
labelListList allNTrans(Pstream::nProcs());
allNTrans[Pstream::myProcNo()].setSize(Pstream::nProcs());
forAll(allNTrans, procI)
{
allNTrans[Pstream::myProcNo()][procI] = sendBuf[procI].size();
}
combineReduce(allNTrans, listEq());
// Set up receives
// ~~~~~~~~~~~~~~~
// Post all non-blocking receives before the sends
recvBuf.setSize(Pstream::nProcs());
forAll(recvBuf, procI)
{
if (procI != Pstream::myProcNo())
{
recvBuf[procI].setSize(allNTrans[procI][Pstream::myProcNo()]);
IPstream::read
(
Pstream::nonBlocking,
procI,
reinterpret_cast<char*>(recvBuf[procI].begin()),
recvBuf[procI].byteSize()
);
}
}
// Set up sends
// ~~~~~~~~~~~~
forAll(sendBuf, procI)
{
if (procI != Pstream::myProcNo())
{
OPstream::write
(
Pstream::nonBlocking,
procI,
reinterpret_cast<const char*>(sendBuf[procI].begin()),
sendBuf[procI].byteSize()
);
}
}
// Wait for completion
Pstream::waitRequests();
}
// Do myself
// NOTE(review): in non-parallel runs recvBuf is never resized here,
// so this indexing relies on the caller pre-sizing recvBuf to
// nProcs() (as the mapDistribute constructors do) — confirm.
recvBuf[Pstream::myProcNo()] = sendBuf[Pstream::myProcNo()];
}
// ************************************************************************* //

View File

@ -55,7 +55,7 @@ class centredCPCCellToFaceStencilObject
public:
TypeName("centredCFCCellToFaceStencil");
TypeName("centredCPCCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class centredFECCellToFaceStencilObject
public:
TypeName("centredCFCCellToFaceStencil");
TypeName("centredFECCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindCECCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindCECCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindCPCCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindCPCCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindFECCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindFECCellToFaceStencil");
// Constructors

View File

@ -93,202 +93,6 @@ void Foam::extendedCellToFaceStencil::writeStencilStats
}
// Build a mapDistribute that gathers the (possibly remote) cells of
// faceStencil into compact numbering: local cells first, then the
// used-only remote cells per processor. faceStencil is renumbered in
// place from global to compact indices.
Foam::autoPtr<Foam::mapDistribute>
Foam::extendedCellToFaceStencil::calcDistributeMap
(
const polyMesh& mesh,
const globalIndex& globalNumbering,
labelListList& faceStencil
)
{
// Convert stencil to schedule
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~
// We now know what information we need from other processors. This needs
// to be converted into what information I need to send as well
// (mapDistribute)
// 1. Construct per processor compact addressing of the global cells
// needed. The ones from the local processor are not included since
// these are always all needed.
List<Map<label> > globalToProc(Pstream::nProcs());
{
const labelList& procPatchMap = mesh.globalData().procPatchMap();
const polyBoundaryMesh& patches = mesh.boundaryMesh();
// Presize with (as estimate) size of patch to neighbour.
forAll(procPatchMap, procI)
{
if (procPatchMap[procI] != -1)
{
globalToProc[procI].resize
(
patches[procPatchMap[procI]].size()
);
}
}
// Collect all (non-local) globalcells/faces needed.
forAll(faceStencil, faceI)
{
const labelList& stencilCells = faceStencil[faceI];
forAll(stencilCells, i)
{
label globalCellI = stencilCells[i];
label procI = globalNumbering.whichProcID(stencilCells[i]);
if (procI != Pstream::myProcNo())
{
// Next free compact slot; repeats keep their first index
label nCompact = globalToProc[procI].size();
globalToProc[procI].insert(globalCellI, nCompact);
}
}
}
// Sort global cells needed (not really necessary)
forAll(globalToProc, procI)
{
if (procI != Pstream::myProcNo())
{
Map<label>& globalMap = globalToProc[procI];
SortableList<label> sorted(globalMap.toc().xfer());
// Reassign compact indices in ascending global-cell order
forAll(sorted, i)
{
Map<label>::iterator iter = globalMap.find(sorted[i]);
iter() = i;
}
}
}
//forAll(globalToProc, procI)
//{
// Pout<< "From processor:" << procI << " want cells/faces:" << endl;
// forAllConstIter(Map<label>, globalToProc[procI], iter)
// {
// Pout<< " global:" << iter.key()
// << " local:" << globalNumbering.toLocal(procI, iter.key())
// << endl;
// }
// Pout<< endl;
//}
}
// 2. The overall compact addressing is
// - myProcNo data first (uncompacted)
// - all other processors consecutively
labelList compactStart(Pstream::nProcs());
compactStart[Pstream::myProcNo()] = 0;
label nCompact = globalNumbering.localSize();
forAll(compactStart, procI)
{
if (procI != Pstream::myProcNo())
{
compactStart[procI] = nCompact;
nCompact += globalToProc[procI].size();
}
}
// 3. Find out what to receive/send in compact addressing.
labelListList recvCompact(Pstream::nProcs());
for (label procI = 0; procI < Pstream::nProcs(); procI++)
{
if (procI != Pstream::myProcNo())
{
labelList wantedGlobals(globalToProc[procI].size());
recvCompact[procI].setSize(globalToProc[procI].size());
label i = 0;
forAllConstIter(Map<label>, globalToProc[procI], iter)
{
wantedGlobals[i] = iter.key();
recvCompact[procI][i] = compactStart[procI]+iter();
i++;
}
// Send the global cell numbers I need from procI
OPstream str(Pstream::blocking, procI);
str << wantedGlobals;
}
else
{
// My own cells are received as-is at the start of the table
recvCompact[procI] =
compactStart[procI]
+ identity(globalNumbering.localSize());
}
}
labelListList sendCompact(Pstream::nProcs());
for (label procI = 0; procI < Pstream::nProcs(); procI++)
{
if (procI != Pstream::myProcNo())
{
// See what neighbour wants to receive (= what I need to send)
IPstream str(Pstream::blocking, procI);
labelList globalCells(str);
labelList& procCompact = sendCompact[procI];
procCompact.setSize(globalCells.size());
// Convert from globalCells (all on my processor!) into compact
// addressing
forAll(globalCells, i)
{
label cellI = globalNumbering.toLocal(globalCells[i]);
procCompact[i] = compactStart[Pstream::myProcNo()]+cellI;
}
}
else
{
sendCompact[procI] = recvCompact[procI];
}
}
// Convert stencil to compact numbering
forAll(faceStencil, faceI)
{
labelList& stencilCells = faceStencil[faceI];
forAll(stencilCells, i)
{
label globalCellI = stencilCells[i];
label procI = globalNumbering.whichProcID(globalCellI);
if (procI != Pstream::myProcNo())
{
label localCompact = globalToProc[procI][globalCellI];
stencilCells[i] = compactStart[procI]+localCompact;
}
else
{
label localCompact = globalNumbering.toLocal(globalCellI);
stencilCells[i] = compactStart[procI]+localCompact;
}
}
}
// Construct map for distribution of compact data.
autoPtr<mapDistribute> mapPtr
(
new mapDistribute
(
nCompact,
sendCompact.xfer(),
recvCompact.xfer()
)
);
return mapPtr;
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::extendedCellToFaceStencil::extendedCellToFaceStencil(const polyMesh& mesh)

View File

@ -112,14 +112,6 @@ public:
// Member Functions
//- Calculate distribute map
static autoPtr<mapDistribute> calcDistributeMap
(
const polyMesh& mesh,
const globalIndex& globalNumbering,
labelListList& faceStencil
);
//- Use map to get the data into stencil order
template<class T>
static void collectData

View File

@ -35,16 +35,19 @@ Foam::extendedCentredCellToFaceStencil::extendedCentredCellToFaceStencil
const cellToFaceStencil& stencil
)
:
extendedCellToFaceStencil(stencil.mesh())
extendedCellToFaceStencil(stencil.mesh()),
stencil_(stencil)
{
stencil_ = stencil;
// Calculate distribute map (also renumbers elements in stencil)
mapPtr_ = calcDistributeMap
List<Map<label> > compactMap(Pstream::nProcs());
mapPtr_.reset
(
stencil.mesh(),
stencil.globalNumbering(),
stencil_
new mapDistribute
(
stencil.globalNumbering(),
stencil_,
compactMap
)
);
}

View File

@ -419,19 +419,32 @@ Foam::extendedUpwindCellToFaceStencil::extendedUpwindCellToFaceStencil
neiStencil_
);
ownMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
ownStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
ownMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
ownStencil_,
compactMap
)
);
}
neiMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
neiStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
neiMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
neiStencil_,
compactMap
)
);
}
// stencil now in compact form
if (pureUpwind_)
@ -515,12 +528,18 @@ Foam::extendedUpwindCellToFaceStencil::extendedUpwindCellToFaceStencil
ownStencil_ = stencil;
ownMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
ownStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
ownMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
ownStencil_,
compactMap
)
);
}
const fvMesh& mesh = dynamic_cast<const fvMesh&>(stencil.mesh());

View File

@ -28,9 +28,6 @@ License
#include "extendedCentredFaceToCellStencil.H"
#include "faceToCellStencil.H"
// Only for access to calcDistributeMap <- needs to be moved out
#include "extendedCellToFaceStencil.H"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::extendedCentredFaceToCellStencil::extendedCentredFaceToCellStencil
@ -38,16 +35,19 @@ Foam::extendedCentredFaceToCellStencil::extendedCentredFaceToCellStencil
const faceToCellStencil& stencil
)
:
extendedFaceToCellStencil(stencil.mesh())
extendedFaceToCellStencil(stencil.mesh()),
stencil_(stencil)
{
stencil_ = stencil;
// Calculate distribute map (also renumbers elements in stencil)
mapPtr_ = extendedCellToFaceStencil::calcDistributeMap
List<Map<label> > compactMap(Pstream::nProcs());
mapPtr_.reset
(
stencil.mesh(),
stencil.globalNumbering(),
stencil_
new mapDistribute
(
stencil.globalNumbering(),
stencil_,
compactMap
)
);
}