Merge branch 'master' of ssh://noisy/home/noisy3/OpenFOAM/OpenFOAM-dev

This commit is contained in:
andy 2009-10-26 12:18:35 +00:00
commit 6a62baae2c
17 changed files with 923 additions and 273 deletions

View File

@ -50,6 +50,7 @@ Description
#include "mathematicalConstants.H"
#include "PackedBoolList.H"
#include "SortableList.H"
#include "unitConversion.H"
using namespace Foam;
@ -467,7 +468,7 @@ int main(int argc, char *argv[])
scalar angle(readScalar(IStringStream(args.additionalArgs()[1])()));
bool overwrite = args.optionFound("overwrite");
scalar maxCos = Foam::cos(angle*180/constant::mathematical::pi);
scalar maxCos = Foam::cos(degToRad(angle));
Info<< "Merging:" << nl
<< " edges with length less than " << minLen << " meters" << nl

View File

@ -208,12 +208,13 @@ void readPoints
{
hasWarned = true;
WarningIn
IOWarningIn
(
"readPoints(IFstream&, label&, DynamicList<point>"
", DynamicList<label>&)"
", DynamicList<label>&)",
is
) << "Points not in order starting at point " << pointI
<< " at line " << is.lineNumber()
//<< " at line " << is.lineNumber()
//<< abort(FatalError);
<< endl;
}
@ -429,47 +430,53 @@ void readPatches
>> dofSet >> tempSet >> contactSet >> nFaces;
is.getLine(line);
patchNames.append(string::validate<word>(line));
word groupName = string::validate<word>(line);
Info<< "For facegroup " << group
<< " named " << patchNames[patchNames.size()-1]
Info<< "For group " << group
<< " named " << groupName
<< " trying to read " << nFaces << " patch face indices."
<< endl;
patchFaceIndices.append(labelList(0));
labelList& faceIndices = patchFaceIndices[patchFaceIndices.size()-1];
faceIndices.setSize(nFaces);
label faceI = 0;
labelList groupIndices(nFaces);
label groupType = -1;
nFaces = 0;
while (faceI < faceIndices.size())
while (nFaces < groupIndices.size())
{
is.getLine(line);
IStringStream lineStr(line);
// Read one (for last face) or two entries from line.
label nRead = 2;
if (faceI == faceIndices.size()-1)
if (nFaces == groupIndices.size()-1)
{
nRead = 1;
}
for (label i = 0; i < nRead; i++)
{
label typeCode, tag, nodeLeaf, component;
label tag, nodeLeaf, component;
lineStr >> typeCode >> tag >> nodeLeaf >> component;
lineStr >> groupType >> tag >> nodeLeaf >> component;
if (typeCode != 8)
{
FatalErrorIn("readPatches")
<< "When reading patches expect Entity Type Code 8"
<< nl << "At line " << is.lineNumber()
<< exit(FatalError);
}
faceIndices[faceI++] = tag;
groupIndices[nFaces++] = tag;
}
}
// Store
if (groupType == 8)
{
patchNames.append(groupName);
patchFaceIndices.append(groupIndices);
}
else
{
IOWarningIn("readPatches(..)", is)
<< "When reading patches expect entity type code 8"
<< nl << " Skipping group code " << groupType
<< endl;
}
}
patchNames.shrink();

View File

@ -0,0 +1,394 @@
-1
2411
1 0 0 0
2.0000000000000000E+02 3.0000000000000000E+02 0.0000000000000000E+00
2 0 0 0
2.0000000000000000E+02 3.0000000000000000E+02 2.1000000000000000E+02
3 0 0 0
2.0000000000000000E+02 0.0000000000000000E+00 0.0000000000000000E+00
4 0 0 0
2.0000000000000000E+02 0.0000000000000000E+00 2.1000000000000000E+02
5 0 0 0
0.0000000000000000E+00 3.0000000000000000E+02 0.0000000000000000E+00
6 0 0 0
0.0000000000000000E+00 3.0000000000000000E+02 2.1000000000000000E+02
7 0 0 0
0.0000000000000000E+00 0.0000000000000000E+00 0.0000000000000000E+00
8 0 0 0
0.0000000000000000E+00 0.0000000000000000E+00 2.1000000000000000E+02
9 0 0 0
0.0000000000000000E+00 1.0020000000000000E+02 2.1000000000000000E+02
10 0 0 0
0.0000000000000000E+00 2.0009999999999999E+02 2.1000000000000000E+02
11 0 0 0
1.0000000000000000E+02 3.0000000000000000E+02 2.1000000000000000E+02
12 0 0 0
2.0000000000000000E+02 1.0020000000000000E+02 2.1000000000000000E+02
13 0 0 0
2.0000000000000000E+02 2.0009999999999999E+02 2.1000000000000000E+02
14 0 0 0
1.0000000000000000E+02 0.0000000000000000E+00 2.1000000000000000E+02
15 0 0 0
0.0000000000000000E+00 1.0020000000000000E+02 0.0000000000000000E+00
16 0 0 0
0.0000000000000000E+00 2.0009999999999999E+02 0.0000000000000000E+00
17 0 0 0
1.0000000000000000E+02 3.0000000000000000E+02 0.0000000000000000E+00
18 0 0 0
2.0000000000000000E+02 1.0020000000000000E+02 0.0000000000000000E+00
19 0 0 0
2.0000000000000000E+02 2.0009999999999999E+02 0.0000000000000000E+00
20 0 0 0
1.0000000000000000E+02 0.0000000000000000E+00 0.0000000000000000E+00
21 0 0 0
2.0000000000000000E+02 3.0000000000000000E+02 1.0500000000000000E+02
22 0 0 0
0.0000000000000000E+00 3.0000000000000000E+02 1.0500000000000000E+02
23 0 0 0
2.0000000000000000E+02 0.0000000000000000E+00 1.0500000000000000E+02
24 0 0 0
0.0000000000000000E+00 0.0000000000000000E+00 1.0500000000000000E+02
25 0 0 0
9.6161727574445891E+01 1.3290789499900029E+02 2.1000000000000000E+02
26 0 0 0
8.0082880863556014E+01 2.2412763570985578E+02 2.1000000000000000E+02
27 0 0 0
9.6161727574240444E+01 1.3290789499883087E+02 0.0000000000000000E+00
28 0 0 0
8.0082880863392205E+01 2.2412763570989483E+02 0.0000000000000000E+00
29 0 0 0
2.0000000000000000E+02 1.5011560268321134E+02 1.0500000000592973E+02
30 0 0 0
0.0000000000000000E+00 1.5011560299451648E+02 1.0499999963389952E+02
31 0 0 0
9.9094099553883055E+01 7.2457929267250009E+01 1.0499999994714025E+02
-1
-1
2412
1 11 2 1 7 2
0 0 0
9 8
2 11 2 1 7 2
0 0 0
10 9
3 11 2 1 7 2
0 0 0
6 10
4 11 2 1 7 2
0 0 0
11 6
5 11 2 1 7 2
0 0 0
2 11
6 11 2 1 7 2
0 0 0
4 12
7 11 2 1 7 2
0 0 0
12 13
8 11 2 1 7 2
0 0 0
13 2
9 11 2 1 7 2
0 0 0
8 14
10 11 2 1 7 2
0 0 0
14 4
11 11 2 1 7 2
0 0 0
7 15
12 11 2 1 7 2
0 0 0
15 16
13 11 2 1 7 2
0 0 0
16 5
14 11 2 1 7 2
0 0 0
5 17
15 11 2 1 7 2
0 0 0
17 1
16 11 2 1 7 2
0 0 0
18 3
17 11 2 1 7 2
0 0 0
19 18
18 11 2 1 7 2
0 0 0
1 19
19 11 2 1 7 2
0 0 0
20 7
20 11 2 1 7 2
0 0 0
3 20
21 11 2 1 7 2
0 0 0
21 1
22 11 2 1 7 2
0 0 0
2 21
23 11 2 1 7 2
0 0 0
5 22
24 11 2 1 7 2
0 0 0
22 6
25 11 2 1 7 2
0 0 0
3 23
26 11 2 1 7 2
0 0 0
23 4
27 11 2 1 7 2
0 0 0
24 7
28 11 2 1 7 2
0 0 0
8 24
29 41 2 1 7 3
9 8 14
30 41 2 1 7 3
10 9 25
31 41 2 1 7 3
6 10 26
32 41 2 1 7 3
11 6 26
33 41 2 1 7 3
10 25 26
34 41 2 1 7 3
2 11 13
35 41 2 1 7 3
4 12 14
36 41 2 1 7 3
12 13 25
37 41 2 1 7 3
14 12 25
38 41 2 1 7 3
9 14 25
39 41 2 1 7 3
13 11 26
40 41 2 1 7 3
13 26 25
41 41 2 1 7 3
7 15 20
42 41 2 1 7 3
15 16 27
43 41 2 1 7 3
16 5 28
44 41 2 1 7 3
27 16 28
45 41 2 1 7 3
5 17 28
46 41 2 1 7 3
17 1 19
47 41 2 1 7 3
18 3 20
48 41 2 1 7 3
19 18 27
49 41 2 1 7 3
18 20 27
50 41 2 1 7 3
20 15 27
51 41 2 1 7 3
17 19 28
52 41 2 1 7 3
28 19 27
53 41 2 1 7 3
17 5 22
54 41 2 1 7 3
1 17 21
55 41 2 1 7 3
2 21 11
56 41 2 1 7 3
6 11 22
57 41 2 1 7 3
22 11 21
58 41 2 1 7 3
22 21 17
59 41 2 1 7 3
7 20 24
60 41 2 1 7 3
20 3 23
61 41 2 1 7 3
23 4 14
62 41 2 1 7 3
14 8 24
63 41 2 1 7 3
14 24 23
64 41 2 1 7 3
20 23 24
65 41 2 1 7 3
23 3 18
66 41 2 1 7 3
4 23 12
67 41 2 1 7 3
13 12 29
68 41 2 1 7 3
2 13 21
69 41 2 1 7 3
22 16 30
70 41 2 1 7 3
13 29 21
71 41 2 1 7 3
1 21 19
72 41 2 1 7 3
12 23 29
73 41 2 1 7 3
18 19 29
74 41 2 1 7 3
23 18 29
75 41 2 1 7 3
15 24 30
76 41 2 1 7 3
29 19 21
77 41 2 1 7 3
7 24 15
78 41 2 1 7 3
24 8 9
79 41 2 1 7 3
9 10 30
80 41 2 1 7 3
10 6 22
81 41 2 1 7 3
30 10 22
82 41 2 1 7 3
16 15 30
83 41 2 1 7 3
22 5 16
84 41 2 1 7 3
24 9 30
85 111 2 1 7 4
21 28 22 17
86 111 2 1 7 4
31 18 29 23
87 111 2 1 7 4
31 29 18 27
88 111 2 1 7 4
29 21 26 13
89 111 2 1 7 4
29 25 26 30
90 111 2 1 7 4
26 28 29 30
91 111 2 1 7 4
16 28 5 22
92 111 2 1 7 4
30 25 10 9
93 111 2 1 7 4
16 30 15 27
94 111 2 1 7 4
26 25 10 30
95 111 2 1 7 4
21 28 26 22
96 111 2 1 7 4
31 20 18 23
97 111 2 1 7 4
27 28 16 30
98 111 2 1 7 4
18 27 29 19
99 111 2 1 7 4
21 22 26 11
100 111 2 1 7 4
23 4 12 14
101 111 2 1 7 4
31 25 29 30
102 111 2 1 7 4
31 18 20 27
103 111 2 1 7 4
21 11 26 13
104 111 2 1 7 4
22 28 5 17
105 111 2 1 7 4
2 11 21 13
106 111 2 1 7 4
29 25 12 13
107 111 2 1 7 4
30 28 16 22
108 111 2 1 7 4
31 24 20 23
109 111 2 1 7 4
26 28 30 22
110 111 2 1 7 4
21 17 19 28
111 111 2 1 7 4
15 24 31 30
112 111 2 1 7 4
31 9 30 24
113 111 2 1 7 4
15 30 31 27
114 111 2 1 7 4
8 14 24 9
115 111 2 1 7 4
26 22 10 6
116 111 2 1 7 4
31 25 12 29
117 111 2 1 7 4
31 29 12 23
118 111 2 1 7 4
31 30 9 25
119 111 2 1 7 4
21 1 19 17
120 111 2 1 7 4
29 19 27 28
121 111 2 1 7 4
10 22 26 30
122 111 2 1 7 4
29 28 26 21
123 111 2 1 7 4
7 20 15 24
124 111 2 1 7 4
11 22 26 6
125 111 2 1 7 4
29 28 27 30
126 111 2 1 7 4
31 23 12 14
127 111 2 1 7 4
26 25 29 13
128 111 2 1 7 4
15 27 31 20
129 111 2 1 7 4
21 19 29 28
130 111 2 1 7 4
18 3 23 20
131 111 2 1 7 4
15 20 31 24
132 111 2 1 7 4
27 31 29 30
133 111 2 1 7 4
31 14 9 24
134 111 2 1 7 4
31 9 14 25
135 111 2 1 7 4
31 14 12 25
136 111 2 1 7 4
31 23 14 24
-1
-1
2467
0 0 0 0 0 0 0 10
outlet
8 70 0 0 8 67 0 0
8 68 0 0 8 71 0 0
8 76 0 0 8 66 0 0
8 72 0 0 8 73 0 0
8 74 0 0 8 65 0 0
1 0 0 0 0 0 0 12
inlet
8 36 0 0 8 30 0 0
8 31 0 0 8 37 0 0
8 34 0 0 8 38 0 0
8 33 0 0 8 35 0 0
8 32 0 0 8 29 0 0
8 39 0 0 8 40 0 0
2 0 0 0 0 0 0 12
nodes
7 2 0 0 7 4 0 0
7 6 0 0 7 8 0 0
7 9 0 0 7 10 0 0
7 11 0 0 7 12 0 0
7 13 0 0 7 14 0 0
7 25 0 0 7 26 0 0
-1

View File

@ -94,7 +94,8 @@ int main(int argc, char *argv[])
noFailedChecks += checkGeometry(mesh, allGeometry);
reduce(noFailedChecks, sumOp<label>());
// Note: no reduction in noFailedChecks necessary since is
// counter of checks, not counter of failed cells,faces etc.
if (noFailedChecks == 0)
{
@ -112,8 +113,6 @@ int main(int argc, char *argv[])
label nFailedChecks = checkGeometry(mesh, allGeometry);
reduce(nFailedChecks, sumOp<label>());
if (nFailedChecks)
{
Info<< "\nFailed " << nFailedChecks << " mesh checks.\n"

View File

@ -27,7 +27,7 @@ License
#include "mapDistribute.H"
#include "commSchedule.H"
#include "HashSet.H"
#include "ListOps.H"
#include "globalIndex.H"
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
@ -257,6 +257,292 @@ Foam::mapDistribute::mapDistribute
}
Foam::mapDistribute::mapDistribute
(
    const globalIndex& globalNumbering,
    labelList& elements,
    List<Map<label> >& compactMap
)
:
    constructSize_(0),
    schedulePtr_()
{
    // Construct a distribute map from a list of (possibly remote) global
    // element indices:
    // - determines a compact numbering: all local elements first
    //   (uncompacted), then the used-only remote elements per processor
    // - fills compactMap[procI] : owner-local index -> compact index
    // - renumbers 'elements' in place into this compact numbering.

    // 1. Construct per processor compact addressing of the global elements
    //    needed. The ones from the local processor are not included since
    //    these are always all needed.
    compactMap.setSize(Pstream::nProcs());

    {
        // Count all (non-local) elements needed. Just for presizing map.
        labelList nNonLocal(Pstream::nProcs(), 0);

        forAll(elements, i)
        {
            label globalIndex = elements[i];

            if (!globalNumbering.isLocal(globalIndex))
            {
                label procI = globalNumbering.whichProcID(globalIndex);
                nNonLocal[procI]++;
            }
        }

        forAll(compactMap, procI)
        {
            if (procI != Pstream::myProcNo())
            {
                // 2x the counted size to limit hash-table rehashing
                compactMap[procI].resize(2*nNonLocal[procI]);
            }
        }

        // Collect all (non-local) elements needed.
        forAll(elements, i)
        {
            label globalIndex = elements[i];

            if (!globalNumbering.isLocal(globalIndex))
            {
                label procI = globalNumbering.whichProcID(globalIndex);
                label index = globalNumbering.toLocal(procI, globalIndex);
                // Current map size doubles as the next (unsorted) compact
                // offset; duplicate indices are collapsed by the Map insert.
                label nCompact = compactMap[procI].size();
                compactMap[procI].insert(index, nCompact);
            }
        }

        //// Sort remote elements needed (not really necessary)
        //forAll(compactMap, procI)
        //{
        //    if (procI != Pstream::myProcNo())
        //    {
        //        Map<label>& globalMap = compactMap[procI];
        //
        //        SortableList<label> sorted(globalMap.toc().xfer());
        //
        //        forAll(sorted, i)
        //        {
        //            Map<label>::iterator iter = globalMap.find(sorted[i]);
        //            iter() = i;
        //        }
        //    }
        //}
    }


    // 2. The overall compact addressing is
    // - myProcNo data first (uncompacted)
    // - all other processors consecutively

    labelList compactStart(Pstream::nProcs());
    compactStart[Pstream::myProcNo()] = 0;
    constructSize_ = globalNumbering.localSize();
    forAll(compactStart, procI)
    {
        if (procI != Pstream::myProcNo())
        {
            compactStart[procI] = constructSize_;
            constructSize_ += compactMap[procI].size();
        }
    }


    // 3. Find out what to receive/send in compact addressing.

    // What I want to receive is what others have to send
    labelListList wantedRemoteElements(Pstream::nProcs());
    // Compact addressing for received data
    constructMap_.setSize(Pstream::nProcs());

    forAll(compactMap, procI)
    {
        if (procI == Pstream::myProcNo())
        {
            // All my own elements are used
            label nLocal = globalNumbering.localSize();
            wantedRemoteElements[procI] = identity(nLocal);
            constructMap_[procI] = identity(nLocal);
        }
        else
        {
            // Remote elements wanted from processor procI
            labelList& remoteElem = wantedRemoteElements[procI];
            labelList& localElem = constructMap_[procI];
            remoteElem.setSize(compactMap[procI].size());
            localElem.setSize(compactMap[procI].size());
            label i = 0;
            forAllIter(Map<label>, compactMap[procI], iter)
            {
                remoteElem[i] = iter.key();
                label compactI = compactStart[procI]+iter();
                localElem[i] = compactI;
                // Overwrite the provisional offset with the final compact
                // position so compactMap can be used by renumber() below
                iter() = compactI;
                i++;
            }
        }
    }

    subMap_.setSize(Pstream::nProcs());
    // Tell each processor which of its elements I want; what it wants from
    // me becomes my subMap_ (send map)
    exchange(wantedRemoteElements, subMap_);

    // Renumber elements into compact addressing
    forAll(elements, i)
    {
        elements[i] = renumber(globalNumbering, compactMap, elements[i]);
    }
}
Foam::mapDistribute::mapDistribute
(
    const globalIndex& globalNumbering,
    labelListList& cellCells,
    List<Map<label> >& compactMap
)
:
    constructSize_(0),
    schedulePtr_()
{
    // Variant of the element-list constructor for data sorted into bins
    // per local index (e.g. cellCells[localCellI] = list of global cells).
    // Determines the compact numbering, fills compactMap and renumbers the
    // contents of cellCells in place.

    // 1. Construct per processor compact addressing of the global elements
    //    needed. The ones from the local processor are not included since
    //    these are always all needed.
    compactMap.setSize(Pstream::nProcs());

    {
        // Count all (non-local) elements needed. Just for presizing map.
        labelList nNonLocal(Pstream::nProcs(), 0);

        forAll(cellCells, cellI)
        {
            const labelList& cCells = cellCells[cellI];

            forAll(cCells, i)
            {
                label globalIndex = cCells[i];

                if (!globalNumbering.isLocal(globalIndex))
                {
                    label procI = globalNumbering.whichProcID(globalIndex);
                    nNonLocal[procI]++;
                }
            }
        }

        forAll(compactMap, procI)
        {
            if (procI != Pstream::myProcNo())
            {
                // 2x the counted size to limit hash-table rehashing
                compactMap[procI].resize(2*nNonLocal[procI]);
            }
        }

        // Collect all (non-local) elements needed.
        forAll(cellCells, cellI)
        {
            const labelList& cCells = cellCells[cellI];

            forAll(cCells, i)
            {
                label globalIndex = cCells[i];

                if (!globalNumbering.isLocal(globalIndex))
                {
                    label procI = globalNumbering.whichProcID(globalIndex);
                    label index = globalNumbering.toLocal(procI, globalIndex);
                    // Current map size doubles as the next (unsorted)
                    // compact offset; duplicates collapsed by the insert.
                    label nCompact = compactMap[procI].size();
                    compactMap[procI].insert(index, nCompact);
                }
            }
        }

        //// Sort remote elements needed (not really necessary)
        //forAll(compactMap, procI)
        //{
        //    if (procI != Pstream::myProcNo())
        //    {
        //        Map<label>& globalMap = compactMap[procI];
        //
        //        SortableList<label> sorted(globalMap.toc().xfer());
        //
        //        forAll(sorted, i)
        //        {
        //            Map<label>::iterator iter = globalMap.find(sorted[i]);
        //            iter() = i;
        //        }
        //    }
        //}
    }


    // 2. The overall compact addressing is
    // - myProcNo data first (uncompacted)
    // - all other processors consecutively

    labelList compactStart(Pstream::nProcs());
    compactStart[Pstream::myProcNo()] = 0;
    constructSize_ = globalNumbering.localSize();
    forAll(compactStart, procI)
    {
        if (procI != Pstream::myProcNo())
        {
            compactStart[procI] = constructSize_;
            constructSize_ += compactMap[procI].size();
        }
    }


    // 3. Find out what to receive/send in compact addressing.

    // What I want to receive is what others have to send
    labelListList wantedRemoteElements(Pstream::nProcs());
    // Compact addressing for received data
    constructMap_.setSize(Pstream::nProcs());

    forAll(compactMap, procI)
    {
        if (procI == Pstream::myProcNo())
        {
            // All my own elements are used
            label nLocal = globalNumbering.localSize();
            wantedRemoteElements[procI] = identity(nLocal);
            constructMap_[procI] = identity(nLocal);
        }
        else
        {
            // Remote elements wanted from processor procI
            labelList& remoteElem = wantedRemoteElements[procI];
            labelList& localElem = constructMap_[procI];
            remoteElem.setSize(compactMap[procI].size());
            localElem.setSize(compactMap[procI].size());
            label i = 0;
            forAllIter(Map<label>, compactMap[procI], iter)
            {
                remoteElem[i] = iter.key();
                label compactI = compactStart[procI]+iter();
                localElem[i] = compactI;
                // Overwrite the provisional offset with the final compact
                // position so compactMap can be used by renumber() below
                iter() = compactI;
                i++;
            }
        }
    }

    subMap_.setSize(Pstream::nProcs());
    // Tell each processor which of its elements I want; what it wants from
    // me becomes my subMap_ (send map)
    exchange(wantedRemoteElements, subMap_);

    // Renumber the bin contents into compact addressing
    forAll(cellCells, cellI)
    {
        labelList& cCells = cellCells[cellI];

        forAll(cCells, i)
        {
            cCells[i] = renumber(globalNumbering, compactMap, cCells[i]);
        }
    }
}
Foam::mapDistribute::mapDistribute(const mapDistribute& map)
:
constructSize_(map.constructSize_),
@ -266,7 +552,27 @@ Foam::mapDistribute::mapDistribute(const mapDistribute& map)
{}
// * * * * * * * * * * * * * Private Member Functions * * * * * * * * * * * //
// * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * * //
Foam::label Foam::mapDistribute::renumber
(
    const globalIndex& globalNumbering,
    const List<Map<label> >& compactMap,
    const label globalI
)
{
    // Convert a single global index into compact addressing: local
    // elements keep their local index; remote elements are looked up in
    // the compact map of their owning processor.
    if (!globalNumbering.isLocal(globalI))
    {
        label ownerProcI = globalNumbering.whichProcID(globalI);
        label ownerLocalI = globalNumbering.toLocal(ownerProcI, globalI);
        return compactMap[ownerProcI][ownerLocalI];
    }

    return globalNumbering.toLocal(globalI);
}
void Foam::mapDistribute::compact(const boolList& elemIsUsed)
{

View File

@ -39,6 +39,16 @@ Note:
Note2: number of items send on one processor have to equal the number
of items received on the other processor.
Constructors using compact numbering: all my own elements first
(whether used or not) followed by used-only remote elements.
So e.g. with 4 procs, on proc 1 the compact
table will first have all globalIndex.localSize() elements from proc1
followed by used-only elements of proc0, proc2, proc3.
The constructed mapDistribute sends the local elements from and
receives the remote elements into their compact position.
compactMap[procI] is the position of elements from procI in the compact
map. compactMap[myProcNo()] is empty since trivial addressing. The indices
into compactMap[procI] are local, not global, indices.
SourceFiles
mapDistribute.C
@ -52,6 +62,7 @@ SourceFiles
#include "labelPair.H"
#include "Pstream.H"
#include "boolList.H"
#include "Map.H"
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
@ -59,6 +70,7 @@ namespace Foam
{
class mapPolyMesh;
class globalIndex;
/*---------------------------------------------------------------------------*\
Class mapDistribute Declaration
@ -81,6 +93,15 @@ class mapDistribute
mutable autoPtr<List<labelPair> > schedulePtr_;
//- Exchange data. sendBuf[procI] : data to send to processor procI
// To be moved into Pstream.
template<class T>
static void exchange
(
const List<List<T> >& sendBuf,
List<List<T> >& recvBuf
);
public:
// Public classes
@ -123,6 +144,27 @@ public:
const labelList& recvProcs
);
//- Construct from list of (possibly) remote elements in globalIndex
// numbering. Determines compact numbering (see above) and
// distribute map to get data into this ordering and renumbers the
// elements to be in compact numbering.
mapDistribute
(
const globalIndex&,
labelList& elements,
List<Map<label> >& compactMap
);
//- Special variant that works with the info sorted into bins
// according to local indices. E.g. think cellCells where
// cellCells[localCellI] is a list of global cells
mapDistribute
(
const globalIndex&,
labelListList& cellCells,
List<Map<label> >& compactMap
);
//- Construct copy
mapDistribute(const mapDistribute&);
@ -180,6 +222,15 @@ public:
// Other
//- Helper for construct from globalIndex. Renumbers element
// (in globalIndex numbering) into compact indices.
static label renumber
(
const globalIndex&,
const List<Map<label> >& compactMap,
const label globalElement
);
//- Compact maps. Gets per field a bool whether it is used (locally)
// and works out itself what this side and sender side can remove
// from maps.

View File

@ -886,4 +886,78 @@ void Foam::mapDistribute::distribute
}
template<class T>
void Foam::mapDistribute::exchange
(
    const List<List<T> >& sendBuf,
    List<List<T> >& recvBuf
)
{
    // All-to-all exchange of contiguous data: sendBuf[procI] is sent to
    // processor procI; on return recvBuf[procI] holds the data received
    // from procI (recvBuf[myProcNo] is copied locally). Only valid for
    // contiguous (bitwise-transferable) types T since raw bytes are sent.

    if (!contiguous<T>())
    {
        FatalErrorIn("mapDistribute::exchange(..)")
            << "Not contiguous" << exit(FatalError);
    }

    if (Pstream::parRun())
    {
        // Determine sizes
        // ~~~~~~~~~~~~~~~

        // allNTrans[procI][procJ] : number of elements procI sends to procJ
        labelListList allNTrans(Pstream::nProcs());
        allNTrans[Pstream::myProcNo()].setSize(Pstream::nProcs());
        forAll(allNTrans, procI)
        {
            allNTrans[Pstream::myProcNo()][procI] = sendBuf[procI].size();
        }
        // After the reduce every processor knows all send sizes, so the
        // receive buffers below can be presized exactly.
        combineReduce(allNTrans, listEq());


        // Set up receives
        // ~~~~~~~~~~~~~~~
        // Non-blocking receives posted first; sends follow and both
        // complete at waitRequests() below.

        recvBuf.setSize(Pstream::nProcs());
        forAll(recvBuf, procI)
        {
            if (procI != Pstream::myProcNo())
            {
                recvBuf[procI].setSize(allNTrans[procI][Pstream::myProcNo()]);
                IPstream::read
                (
                    Pstream::nonBlocking,
                    procI,
                    reinterpret_cast<char*>(recvBuf[procI].begin()),
                    recvBuf[procI].byteSize()
                );
            }
        }


        // Set up sends
        // ~~~~~~~~~~~~

        forAll(sendBuf, procI)
        {
            if (procI != Pstream::myProcNo())
            {
                OPstream::write
                (
                    Pstream::nonBlocking,
                    procI,
                    reinterpret_cast<const char*>(sendBuf[procI].begin()),
                    sendBuf[procI].byteSize()
                );
            }
        }


        // Wait for completion of all outstanding sends and receives
        Pstream::waitRequests();
    }

    // Do myself
    recvBuf[Pstream::myProcNo()] = sendBuf[Pstream::myProcNo()];
}
// ************************************************************************* //

View File

@ -55,7 +55,7 @@ class centredCPCCellToFaceStencilObject
public:
TypeName("centredCFCCellToFaceStencil");
TypeName("centredCPCCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class centredFECCellToFaceStencilObject
public:
TypeName("centredCFCCellToFaceStencil");
TypeName("centredFECCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindCECCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindCECCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindCPCCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindCPCCellToFaceStencil");
// Constructors

View File

@ -55,7 +55,7 @@ class upwindFECCellToFaceStencilObject
public:
TypeName("upwindCFCCellToFaceStencil");
TypeName("upwindFECCellToFaceStencil");
// Constructors

View File

@ -93,202 +93,6 @@ void Foam::extendedCellToFaceStencil::writeStencilStats
}
Foam::autoPtr<Foam::mapDistribute>
Foam::extendedCellToFaceStencil::calcDistributeMap
(
    const polyMesh& mesh,
    const globalIndex& globalNumbering,
    labelListList& faceStencil
)
{
    // Builds a distribute map for a per-face stencil of global cell
    // indices and renumbers faceStencil in place into compact addressing
    // (local cells first, then the used remote cells per processor).

    // Convert stencil to schedule
    // ~~~~~~~~~~~~~~~~~~~~~~~~~~~

    // We now know what information we need from other processors. This needs
    // to be converted into what information I need to send as well
    // (mapDistribute)


    // 1. Construct per processor compact addressing of the global cells
    //    needed. The ones from the local processor are not included since
    //    these are always all needed.
    List<Map<label> > globalToProc(Pstream::nProcs());
    {
        const labelList& procPatchMap = mesh.globalData().procPatchMap();
        const polyBoundaryMesh& patches = mesh.boundaryMesh();

        // Presize with (as estimate) size of patch to neighbour.
        forAll(procPatchMap, procI)
        {
            if (procPatchMap[procI] != -1)
            {
                globalToProc[procI].resize
                (
                    patches[procPatchMap[procI]].size()
                );
            }
        }

        // Collect all (non-local) globalcells/faces needed.
        forAll(faceStencil, faceI)
        {
            const labelList& stencilCells = faceStencil[faceI];

            forAll(stencilCells, i)
            {
                label globalCellI = stencilCells[i];
                label procI = globalNumbering.whichProcID(stencilCells[i]);

                if (procI != Pstream::myProcNo())
                {
                    // Current map size doubles as provisional compact
                    // offset; duplicates collapsed by the insert
                    label nCompact = globalToProc[procI].size();
                    globalToProc[procI].insert(globalCellI, nCompact);
                }
            }
        }
        // Sort global cells needed (not really necessary)
        forAll(globalToProc, procI)
        {
            if (procI != Pstream::myProcNo())
            {
                Map<label>& globalMap = globalToProc[procI];

                SortableList<label> sorted(globalMap.toc().xfer());

                // Reassign compact offsets in sorted-key order
                forAll(sorted, i)
                {
                    Map<label>::iterator iter = globalMap.find(sorted[i]);
                    iter() = i;
                }
            }
        }

        //forAll(globalToProc, procI)
        //{
        //    Pout<< "From processor:" << procI << " want cells/faces:" << endl;
        //    forAllConstIter(Map<label>, globalToProc[procI], iter)
        //    {
        //        Pout<< "    global:" << iter.key()
        //            << " local:" << globalNumbering.toLocal(procI, iter.key())
        //            << endl;
        //    }
        //    Pout<< endl;
        //}
    }


    // 2. The overall compact addressing is
    // - myProcNo data first (uncompacted)
    // - all other processors consecutively

    labelList compactStart(Pstream::nProcs());
    compactStart[Pstream::myProcNo()] = 0;
    label nCompact = globalNumbering.localSize();
    forAll(compactStart, procI)
    {
        if (procI != Pstream::myProcNo())
        {
            compactStart[procI] = nCompact;
            nCompact += globalToProc[procI].size();
        }
    }


    // 3. Find out what to receive/send in compact addressing.
    labelListList recvCompact(Pstream::nProcs());
    for (label procI = 0; procI < Pstream::nProcs(); procI++)
    {
        if (procI != Pstream::myProcNo())
        {
            labelList wantedGlobals(globalToProc[procI].size());
            recvCompact[procI].setSize(globalToProc[procI].size());

            label i = 0;
            forAllConstIter(Map<label>, globalToProc[procI], iter)
            {
                wantedGlobals[i] = iter.key();
                recvCompact[procI][i] = compactStart[procI]+iter();
                i++;
            }

            // Send the global cell numbers I need from procI
            OPstream str(Pstream::blocking, procI);
            str << wantedGlobals;
        }
        else
        {
            // My own cells: compact index == local cell index
            recvCompact[procI] =
                compactStart[procI]
              + identity(globalNumbering.localSize());
        }
    }
    labelListList sendCompact(Pstream::nProcs());
    for (label procI = 0; procI < Pstream::nProcs(); procI++)
    {
        if (procI != Pstream::myProcNo())
        {
            // See what neighbour wants to receive (= what I need to send)
            IPstream str(Pstream::blocking, procI);
            labelList globalCells(str);

            labelList& procCompact = sendCompact[procI];
            procCompact.setSize(globalCells.size());

            // Convert from globalCells (all on my processor!) into compact
            // addressing
            forAll(globalCells, i)
            {
                label cellI = globalNumbering.toLocal(globalCells[i]);
                procCompact[i] = compactStart[Pstream::myProcNo()]+cellI;
            }
        }
        else
        {
            sendCompact[procI] = recvCompact[procI];
        }
    }

    // Convert stencil to compact numbering
    forAll(faceStencil, faceI)
    {
        labelList& stencilCells = faceStencil[faceI];

        forAll(stencilCells, i)
        {
            label globalCellI = stencilCells[i];
            label procI = globalNumbering.whichProcID(globalCellI);
            if (procI != Pstream::myProcNo())
            {
                // Remote cell: sorted compact offset from globalToProc
                label localCompact = globalToProc[procI][globalCellI];
                stencilCells[i] = compactStart[procI]+localCompact;
            }
            else
            {
                // Local cell: offset is the local cell index
                label localCompact = globalNumbering.toLocal(globalCellI);
                stencilCells[i] = compactStart[procI]+localCompact;
            }
        }
    }


    // Construct map for distribution of compact data.
    autoPtr<mapDistribute> mapPtr
    (
        new mapDistribute
        (
            nCompact,
            sendCompact.xfer(),
            recvCompact.xfer()
        )
    );

    return mapPtr;
}
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::extendedCellToFaceStencil::extendedCellToFaceStencil(const polyMesh& mesh)

View File

@ -112,14 +112,6 @@ public:
// Member Functions
//- Calculate distribute map
static autoPtr<mapDistribute> calcDistributeMap
(
const polyMesh& mesh,
const globalIndex& globalNumbering,
labelListList& faceStencil
);
//- Use map to get the data into stencil order
template<class T>
static void collectData

View File

@ -35,16 +35,19 @@ Foam::extendedCentredCellToFaceStencil::extendedCentredCellToFaceStencil
const cellToFaceStencil& stencil
)
:
extendedCellToFaceStencil(stencil.mesh())
extendedCellToFaceStencil(stencil.mesh()),
stencil_(stencil)
{
stencil_ = stencil;
// Calculate distribute map (also renumbers elements in stencil)
mapPtr_ = calcDistributeMap
List<Map<label> > compactMap(Pstream::nProcs());
mapPtr_.reset
(
stencil.mesh(),
stencil.globalNumbering(),
stencil_
new mapDistribute
(
stencil.globalNumbering(),
stencil_,
compactMap
)
);
}

View File

@ -419,19 +419,32 @@ Foam::extendedUpwindCellToFaceStencil::extendedUpwindCellToFaceStencil
neiStencil_
);
ownMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
ownStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
ownMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
ownStencil_,
compactMap
)
);
}
neiMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
neiStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
neiMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
neiStencil_,
compactMap
)
);
}
// stencil now in compact form
if (pureUpwind_)
@ -515,12 +528,18 @@ Foam::extendedUpwindCellToFaceStencil::extendedUpwindCellToFaceStencil
ownStencil_ = stencil;
ownMapPtr_ = calcDistributeMap
(
stencil.mesh(),
stencil.globalNumbering(),
ownStencil_
);
{
List<Map<label> > compactMap(Pstream::nProcs());
ownMapPtr_.reset
(
new mapDistribute
(
stencil.globalNumbering(),
ownStencil_,
compactMap
)
);
}
const fvMesh& mesh = dynamic_cast<const fvMesh&>(stencil.mesh());

View File

@ -28,9 +28,6 @@ License
#include "extendedCentredFaceToCellStencil.H"
#include "faceToCellStencil.H"
// Only for access to calcDistributeMap <- needs to be moved out
#include "extendedCellToFaceStencil.H"
// * * * * * * * * * * * * * * * * Constructors * * * * * * * * * * * * * * //
Foam::extendedCentredFaceToCellStencil::extendedCentredFaceToCellStencil
@ -38,16 +35,19 @@ Foam::extendedCentredFaceToCellStencil::extendedCentredFaceToCellStencil
const faceToCellStencil& stencil
)
:
extendedFaceToCellStencil(stencil.mesh())
extendedFaceToCellStencil(stencil.mesh()),
stencil_(stencil)
{
stencil_ = stencil;
// Calculate distribute map (also renumbers elements in stencil)
mapPtr_ = extendedCellToFaceStencil::calcDistributeMap
List<Map<label> > compactMap(Pstream::nProcs());
mapPtr_.reset
(
stencil.mesh(),
stencil.globalNumbering(),
stencil_
new mapDistribute
(
stencil.globalNumbering(),
stencil_,
compactMap
)
);
}