Merge branch 'master' of ssh://noisy/home/noisy3/OpenFOAM/OpenFOAM-dev

andy 2009-09-09 16:48:18 +01:00
commit 8af623cd1e
5 changed files with 81 additions and 18 deletions

View File

@@ -222,13 +222,14 @@ labelList findBaffles(const polyMesh& mesh, const labelList& boundaryFaces)
int main(int argc, char *argv[])
{
#   include "addRegionOption.H"
    argList::validOptions.insert("split", "");
    argList::validOptions.insert("overwrite", "");
    argList::validOptions.insert("detectOnly", "");
#   include "setRootCase.H"
#   include "createTime.H"
    runTime.functionObjects().off();
#   include "createMesh.H"
#   include "createNamedMesh.H"
    const word oldInstance = mesh.pointsInstance();
    bool split = args.optionFound("split");
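Switching from createMesh.H to createNamedMesh.H, together with addRegionOption.H, lets the utility run on a named mesh region rather than only the default one. A rough sketch of what the region-aware mesh creation amounts to, assuming the args and runTime objects from main() above; the accessor used to read the option value (args.options()["region"]) is an assumed spelling, not copied from the header:

    // Sketch only: take the region name from the -region option if given,
    // otherwise fall back to the default region.
    word regionName = polyMesh::defaultRegion;
    if (args.optionFound("region"))
    {
        // Assumed accessor for the option value; not from the actual header.
        regionName = word(args.options()["region"]);
    }

    // Construct the fvMesh for the selected region.
    fvMesh mesh
    (
        IOobject
        (
            regionName,
            runTime.timeName(),
            runTime,
            IOobject::MUST_READ
        )
    );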

View File

@@ -22,6 +22,12 @@ numberOfSubdomains 4;
//- Keep owner and neighbour on same processor for faces in zones:
// preserveFaceZones (heater solid1 solid3);
//- Keep owner and neighbour on same processor for faces in patches:
// (makes sense only for cyclic patches)
//preservePatches (cyclic_left_right);
method scotch;
// method hierarchical;
// method simple;
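For reference, a hedged example of how the new entry might be combined with the existing ones in a case's system/decomposeParDict; the names cyclic_left_right, heater, solid1 and solid3 are just the illustrative names from the comments above, not something every case will have:

    numberOfSubdomains 4;

    method scotch;

    // Keep owner and neighbour of these (cyclic) patch faces on the same processor.
    preservePatches (cyclic_left_right);

    // Optionally do the same for faces in these zones.
    // preserveFaceZones (heater solid1 solid3);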

View File

@@ -45,6 +45,35 @@ void domainDecomposition::distributeCells()
    labelHashSet sameProcFaces;
    if (decompositionDict_.found("preservePatches"))
    {
        wordList pNames(decompositionDict_.lookup("preservePatches"));
        Info<< "Keeping owner of faces in patches " << pNames
            << " on same processor. This only makes sense for cyclics." << endl;
        const polyBoundaryMesh& patches = boundaryMesh();
        forAll(pNames, i)
        {
            label patchI = patches.findPatchID(pNames[i]);
            if (patchI == -1)
            {
                FatalErrorIn("domainDecomposition::distributeCells()")
                    << "Unknown preservePatch " << pNames[i]
                    << endl << "Valid patches are " << patches.names()
                    << exit(FatalError);
            }
            const polyPatch& pp = patches[patchI];
            forAll(pp, i)
            {
                sameProcFaces.insert(pp.start() + i);
            }
        }
    }
    if (decompositionDict_.found("preserveFaceZones"))
    {
        wordList zNames(decompositionDict_.lookup("preserveFaceZones"));
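The hunk is truncated here. Presumably the preserveFaceZones branch mirrors the patch handling above, looking each name up in the mesh's faceZones and inserting the zone's faces into sameProcFaces. A hedged sketch of that continuation (a guess at the omitted lines, not the actual source):

        // Guess at the omitted continuation of the preserveFaceZones branch.
        const faceZoneMesh& fZones = faceZones();

        forAll(zNames, i)
        {
            label zoneI = fZones.findZoneID(zNames[i]);

            if (zoneI == -1)
            {
                FatalErrorIn("domainDecomposition::distributeCells()")
                    << "Unknown preserveFaceZone " << zNames[i]
                    << endl << "Valid faceZones are " << fZones.names()
                    << exit(FatalError);
            }

            // Insert every mesh face belonging to this zone.
            const faceZone& fz = fZones[zoneI];

            forAll(fz, i)
            {
                sameProcFaces.insert(fz[i]);
            }
        }
    }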

View File

@@ -2661,10 +2661,22 @@ void Foam::autoLayerDriver::addLayers
    {
        const polyBoundaryMesh& patches = mesh.boundaryMesh();
        Info<< nl
            << "patch faces layers avg thickness[m]" << nl
            << " near-wall overall" << nl
            << "----- ----- ------ --------- -------" << endl;
        // Find maximum length of a patch name, for a nicer output
        label maxPatchNameLen = 0;
        forAll(meshMover.adaptPatchIDs(), i)
        {
            label patchI = meshMover.adaptPatchIDs()[i];
            word patchName = patches[patchI].name();
            maxPatchNameLen = max(maxPatchNameLen, label(patchName.size()));
        }
        Info<< nl
            << setf(ios_base::left) << setw(maxPatchNameLen) << "patch"
            << setw(0) << " faces layers avg thickness[m]" << nl
            << setf(ios_base::left) << setw(maxPatchNameLen) << " "
            << setw(0) << " near-wall overall" << nl
            << setf(ios_base::left) << setw(maxPatchNameLen) << "-----"
            << setw(0) << " ----- ------ --------- -------" << endl;
        forAll(meshMover.adaptPatchIDs(), i)
        {
@@ -2704,18 +2716,24 @@ void Foam::autoLayerDriver::addLayers
            label totNPoints = returnReduce(meshPoints.size(), sumOp<label>());
            //reduce(maxThickness, maxOp<scalar>());
            //reduce(minThickness, minOp<scalar>());
            scalar avgThickness =
                returnReduce(sumThickness, sumOp<scalar>())
              / totNPoints;
            scalar avgNearWallThickness =
                returnReduce(sumNearWallThickness, sumOp<scalar>())
              / totNPoints;
            // For empty patches, totNPoints is 0.
            scalar avgThickness = 0;
            scalar avgNearWallThickness = 0;
            Info<< setf(ios_base::left) << setw(19) << patches[patchI].name();
            //Sout.unsetf(ios_base::left);
            Info<< setprecision(3)
            if (totNPoints > 0)
            {
                //reduce(maxThickness, maxOp<scalar>());
                //reduce(minThickness, minOp<scalar>());
                avgThickness =
                    returnReduce(sumThickness, sumOp<scalar>())
                  / totNPoints;
                avgNearWallThickness =
                    returnReduce(sumNearWallThickness, sumOp<scalar>())
                  / totNPoints;
            }
            Info<< setf(ios_base::left) << setw(maxPatchNameLen)
                << patches[patchI].name() << setprecision(3)
                << " " << setw(8)
                << returnReduce(patches[patchI].size(), sumOp<scalar>())
                << " " << setw(6) << layerParams.numLayers()[patchI]

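This hunk does two things: it sizes the first column from the longest adapted patch name instead of the hard-coded setw(19), and it only computes the thickness averages when a patch actually contributes points (totNPoints > 0), avoiding a division by zero for empty patches. A small self-contained sketch of the same formatting idea in plain iostreams; the patch names and numbers are invented for illustration:

    #include <algorithm>
    #include <iomanip>
    #include <iostream>
    #include <string>
    #include <vector>

    int main()
    {
        // Hypothetical patch statistics: name, face count, average thickness.
        struct PatchStat { std::string name; int faces; double avg; };
        std::vector<PatchStat> stats =
        {
            {"movingWall", 1200, 0.0031},
            {"aVeryLongPatchName", 0, 0.0}   // empty patch: no average
        };

        // Size the first column from the longest name, as the diff does
        // with maxPatchNameLen instead of a fixed width.
        std::size_t w = 0;
        for (const auto& s : stats) w = std::max(w, s.name.size());

        std::cout << std::left << std::setw(int(w)) << "patch"
                  << "  faces  avg thickness[m]\n";

        for (const auto& s : stats)
        {
            // The guard against empty patches happens before the average is
            // computed; here the zero is simply printed as-is.
            std::cout << std::left << std::setw(int(w)) << s.name
                      << "  " << std::setw(5) << s.faces
                      << "  " << std::setprecision(3) << s.avg << '\n';
        }
    }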
View File

@@ -141,9 +141,18 @@ Foam::labelList Foam::decompositionMethod::decompose
    const pointField& coarsePoints
)
{
    scalarField coarseWeights(0);
    // Decompose based on agglomerated points
    labelList coarseDistribution(decompose(coarsePoints));
    return decompose(fineToCoarse, coarsePoints, coarseWeights);
    // Rework back into decomposition for original mesh_
    labelList fineDistribution(fineToCoarse.size());
    forAll(fineDistribution, i)
    {
        fineDistribution[i] = coarseDistribution[fineToCoarse[i]];
    }
    return fineDistribution;
}
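For context, the removed body shows the basic agglomeration round-trip: decompose the coarse (agglomerated) points, then give each fine cell the processor of its coarse parent via fineToCoarse. The new body delegates to the weighted overload with an empty, i.e. effectively uniform, weight field, which presumably performs the same mapping internally. A small standalone sketch of that mapping with made-up data:

    #include <iostream>
    #include <vector>

    // Map a per-coarse-cell decomposition back onto the fine cells:
    // fine cell i goes to the processor of its agglomerate fineToCoarse[i].
    std::vector<int> prolong
    (
        const std::vector<int>& fineToCoarse,   // fine cell -> coarse cell
        const std::vector<int>& coarseDist      // coarse cell -> processor
    )
    {
        std::vector<int> fineDist(fineToCoarse.size());
        for (std::size_t i = 0; i < fineToCoarse.size(); ++i)
        {
            fineDist[i] = coarseDist[fineToCoarse[i]];
        }
        return fineDist;
    }

    int main()
    {
        // Six fine cells agglomerated into three coarse cells...
        std::vector<int> fineToCoarse = {0, 0, 1, 1, 2, 2};
        // ...which the decomposition assigned to processors 0, 1, 1.
        std::vector<int> coarseDist = {0, 1, 1};

        for (int proc : prolong(fineToCoarse, coarseDist))
        {
            std::cout << proc << ' ';
        }
        std::cout << '\n';   // prints: 0 0 1 1 1 1
    }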