Merge branch 'develop' of develop.openfoam.com:Development/OpenFOAM-plus into develop

commit b7ea6ee24e
Author: Andrew Heather
Date:   2016-11-24 12:05:24 +00:00
18 changed files with 356 additions and 124 deletions

View File

@@ -901,33 +901,42 @@ int main(int argc, char *argv[])
     // Read decomposePar dictionary
     dictionary decomposeDict;
     if (Pstream::parRun())
     {
         fileName decompDictFile;
         args.optionReadIfPresent("decomposeParDict", decompDictFile);
 
-        decomposeDict = IOdictionary
+        // A demand-driven decompositionMethod can have issues finding
+        // an alternative decomposeParDict location.
+        IOdictionary* dictPtr = new IOdictionary
         (
             decompositionModel::selectIO
             (
                 IOobject
                 (
                     "decomposeParDict",
                     runTime.system(),
-                    mesh,
-                    IOobject::MUST_READ_IF_MODIFIED,
+                    runTime,
+                    IOobject::MUST_READ,
                     IOobject::NO_WRITE
                 ),
                 decompDictFile
             )
         );
+
+        // Store it on the object registry, but to be found it must also
+        // have the expected "decomposeParDict" name.
+        dictPtr->rename("decomposeParDict");
+        runTime.store(dictPtr);
+
+        decomposeDict = *dictPtr;
     }
     else
     {
         decomposeDict.add("method", "none");
         decomposeDict.add("numberOfSubdomains", 1);
     }
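The hunk above registers the (possibly non-default) decomposeParDict on the object registry under the expected name so that demand-driven decomposition code can still find it. As a hedged usage sketch of how such a run is typically launched (the utility name and processor count are placeholders, not part of this commit; only the -decomposeParDict option itself appears in the diff):

    # Sketch only: run a parallel-aware utility with an alternative decomposeParDict.
    # "someParallelUtility" and "-np 6" are illustrative assumptions.
    mpirun -np 6 someParallelUtility -parallel \
        -decomposeParDict system/decomposeParDict.6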

View File

@@ -3,7 +3,7 @@
   \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox
    \\    /   O peration     |
     \\  /    A nd           | Copyright (C) 2011-2016 OpenFOAM Foundation
-     \\/     M anipulation  |
+     \\/     M anipulation  | Copyright (C) 2016 OpenCFD Ltd.
 -------------------------------------------------------------------------------
 License
     This file is part of OpenFOAM.
@@ -28,15 +28,31 @@ Group
     grpMeshManipulationUtilities
 
 Description
-    Detects faces that share points (baffles). Either merge them or
+    Detects boundary faces that share points (baffles). Either merges them or
     duplicate the points.
 
-Notes:
+Usage
+    \b mergeOrSplitBaffles [OPTION]
+
+    Options:
+      - \par -detect
+        Detect baffles and write to faceSet duplicateFaces.
+
+      - \par -merge
+        Detect baffles and convert to internal faces.
+
+      - \par -split
+        Detect baffles and duplicate the points (used so the two sides
+        can move independently)
+
+      - \par -dict \<dictionary\>
+        Specify a dictionary to read actions from.
+
+Note
     - can only handle pairwise boundary faces. So three faces using
       the same points is not handled (is illegal mesh anyway)
 
     - there is no option to only split/merge some baffles.
 
     - surfaces consisting of duplicate faces can be topologically split
      if the points on the interior of the surface cannot walk to all the
      cells that use them in one go.
@@ -71,6 +87,7 @@ using namespace Foam;
 void insertDuplicateMerge
 (
     const polyMesh& mesh,
+    const labelList& boundaryFaces,
     const labelList& duplicates,
     polyTopoChange& meshMod
 )
@@ -87,8 +104,8 @@ void insertDuplicateMerge
         {
             // Two duplicate faces. Merge.
 
-            label face0 = mesh.nInternalFaces() + bFacei;
-            label face1 = mesh.nInternalFaces() + otherFacei;
+            label face0 = boundaryFaces[bFacei];
+            label face1 = boundaryFaces[otherFacei];
 
             label own0 = faceOwner[face0];
             label own1 = faceOwner[face1];
@@ -156,6 +173,45 @@ void insertDuplicateMerge
 }
 
 
+label patchSize(const polyMesh& mesh, const labelList& patchIDs)
+{
+    const polyBoundaryMesh& patches = mesh.boundaryMesh();
+
+    label sz = 0;
+    forAll(patchIDs, i)
+    {
+        const polyPatch& pp = patches[patchIDs[i]];
+        sz += pp.size();
+    }
+    return sz;
+}
+
+
+labelList patchFaces(const polyMesh& mesh, const labelList& patchIDs)
+{
+    const polyBoundaryMesh& patches = mesh.boundaryMesh();
+
+    labelList faceIDs(patchSize(mesh, patchIDs));
+    label sz = 0;
+    forAll(patchIDs, i)
+    {
+        const polyPatch& pp = patches[patchIDs[i]];
+
+        forAll(pp, ppi)
+        {
+            faceIDs[sz++] = pp.start()+ppi;
+        }
+    }
+
+    if (faceIDs.size() != sz)
+    {
+        FatalErrorInFunction << exit(FatalError);
+    }
+
+    return faceIDs;
+}
+
+
 labelList findBaffles(const polyMesh& mesh, const labelList& boundaryFaces)
 {
     // Get all duplicate face labels (in boundaryFaces indices!).
@@ -173,7 +229,7 @@ labelList findBaffles(const polyMesh& mesh, const labelList& boundaryFaces)
     {
         if (duplicates[bFacei] != -1)
         {
-            label facei = mesh.nInternalFaces() + bFacei;
+            label facei = boundaryFaces[bFacei];
             label patchi = patches.whichPatch(facei);
 
             if (isA<processorPolyPatch>(patches[patchi]))
@@ -205,12 +261,12 @@ labelList findBaffles(const polyMesh& mesh, const labelList& boundaryFaces)
             if (otherFacei != -1 && otherFacei > bFacei)
             {
-                duplicateSet.insert(mesh.nInternalFaces() + bFacei);
-                duplicateSet.insert(mesh.nInternalFaces() + otherFacei);
+                duplicateSet.insert(boundaryFaces[bFacei]);
+                duplicateSet.insert(boundaryFaces[otherFacei]);
             }
         }
 
-        Pout<< "Writing " << duplicateSet.size()
+        Info<< "Writing " << returnReduce(duplicateSet.size(), sumOp<label>())
             << " duplicate faces to faceSet " << duplicateSet.objectPath()
             << nl << endl;
         duplicateSet.write();
@@ -220,8 +276,6 @@ labelList findBaffles(const polyMesh& mesh, const labelList& boundaryFaces)
 }
 
 
 int main(int argc, char *argv[])
 {
     argList::addNote
@@ -232,6 +286,7 @@ int main(int argc, char *argv[])
     #include "addOverwriteOption.H"
     #include "addRegionOption.H"
+    #include "addDictOption.H"
 
     argList::addBoolOption
     (
         "detectOnly",
@@ -249,25 +304,89 @@ int main(int argc, char *argv[])
     #include "createNamedMesh.H"
     const word oldInstance = mesh.pointsInstance();
+    const polyBoundaryMesh& patches = mesh.boundaryMesh();
 
+    const bool readDict = args.optionFound("dict");
     const bool split = args.optionFound("split");
     const bool overwrite = args.optionFound("overwrite");
     const bool detectOnly = args.optionFound("detectOnly");
 
-    // Collect all boundary faces
-    labelList boundaryFaces(mesh.nFaces() - mesh.nInternalFaces());
-
-    forAll(boundaryFaces, i)
-    {
-        boundaryFaces[i] = i+mesh.nInternalFaces();
-    }
-
-    if (detectOnly)
-    {
-        findBaffles(mesh, boundaryFaces);
-        return 0;
-    }
+    if (readDict && (split || detectOnly))
+    {
+        FatalErrorInFunction
+            << "Use of dictionary for settings not compatible with"
+            << " using command line arguments for \"split\""
+            << " or \"detectOnly\"" << exit(FatalError);
+    }
+
+    labelList detectPatchIDs;
+    labelList splitPatchIDs;
+    labelList mergePatchIDs;
+
+    if (readDict)
+    {
+        const word dictName;
+        #include "setSystemMeshDictionaryIO.H"
+
+        Info<< "Reading " << dictName << "\n" << endl;
+        IOdictionary dict(dictIO);
+
+        if (dict.found("detect"))
+        {
+            wordReList patchNames(dict.subDict("detect").lookup("patches"));
+            detectPatchIDs = patches.patchSet(patchNames).sortedToc();
+
+            Info<< "Detecting baffles on " << detectPatchIDs.size()
+                << " patches with "
+                << returnReduce(patchSize(mesh, detectPatchIDs), sumOp<label>())
+                << " faces" << endl;
+        }
+        if (dict.found("merge"))
+        {
+            wordReList patchNames(dict.subDict("merge").lookup("patches"));
+            mergePatchIDs = patches.patchSet(patchNames).sortedToc();
+
+            Info<< "Detecting baffles on " << mergePatchIDs.size()
+                << " patches with "
+                << returnReduce(patchSize(mesh, mergePatchIDs), sumOp<label>())
+                << " faces" << endl;
+        }
+        if (dict.found("split"))
+        {
+            wordReList patchNames(dict.subDict("split").lookup("patches"));
+            splitPatchIDs = patches.patchSet(patchNames).sortedToc();
+
+            Info<< "Detecting baffles on " << splitPatchIDs.size()
+                << " patches with "
+                << returnReduce(patchSize(mesh, splitPatchIDs), sumOp<label>())
+                << " faces" << endl;
+        }
+    }
+    else
+    {
+        if (detectOnly)
+        {
+            detectPatchIDs = identity(patches.size());
+        }
+        else if (split)
+        {
+            splitPatchIDs = identity(patches.size());
+        }
+        else
+        {
+            mergePatchIDs = identity(patches.size());
+        }
+    }
+
+    if (detectPatchIDs.size())
+    {
+        findBaffles(mesh, patchFaces(mesh, detectPatchIDs));
+
+        if (detectOnly)
+        {
+            return 0;
+        }
+    }
 
     // Read objects in time directory
@@ -308,64 +427,118 @@ int main(int argc, char *argv[])
         ReadFields(mesh, objects, stFlds);
 
-    // Mesh change engine
-    polyTopoChange meshMod(mesh);
-
-    if (split)
-    {
-        Pout<< "Topologically splitting duplicate surfaces"
-            << ", i.e. duplicating points internal to duplicate surfaces."
-            << nl << endl;
-
-        // Analyse which points need to be duplicated
-        localPointRegion regionSide(mesh);
-
-        // Point duplication engine
-        duplicatePoints pointDuplicator(mesh);
-
-        // Insert topo changes
-        pointDuplicator.setRefinement(regionSide, meshMod);
-    }
-    else
-    {
-        Pout<< "Merging duplicate faces."
-            << nl << endl;
-
-        // Get all duplicate face labels (in boundaryFaces indices!).
-        labelList duplicates(findBaffles(mesh, boundaryFaces));
-
-        // Merge into internal faces.
-        insertDuplicateMerge(mesh, duplicates, meshMod);
-    }
-
-    if (!overwrite)
-    {
-        runTime++;
-    }
-
-    // Change the mesh. No inflation.
-    autoPtr<mapPolyMesh> map = meshMod.changeMesh(mesh, false);
-
-    // Update fields
-    mesh.updateMesh(map);
-
-    // Move mesh (since morphing does not do this)
-    if (map().hasMotionPoints())
-    {
-        mesh.movePoints(map().preMotionPoints());
-    }
-
-    if (overwrite)
-    {
-        mesh.setInstance(oldInstance);
-    }
-    Pout<< "Writing mesh to time " << runTime.timeName() << endl;
-    mesh.write();
-
-    // Dump duplicated points (if any)
-    if (split)
-    {
+    if (mergePatchIDs.size())
+    {
+        Info<< "Merging duplicate faces" << nl << endl;
+
+        // Mesh change engine
+        polyTopoChange meshMod(mesh);
+
+        const labelList boundaryFaces(patchFaces(mesh, mergePatchIDs));
+
+        // Get all duplicate face pairs (in boundaryFaces indices!).
+        labelList duplicates(findBaffles(mesh, boundaryFaces));
+
+        // Merge into internal faces.
+        insertDuplicateMerge(mesh, boundaryFaces, duplicates, meshMod);
+
+        if (!overwrite)
+        {
+            runTime++;
+        }
+
+        // Change the mesh. No inflation.
+        autoPtr<mapPolyMesh> map = meshMod.changeMesh(mesh, false);
+
+        // Update fields
+        mesh.updateMesh(map);
+
+        // Move mesh (since morphing does not do this)
+        if (map().hasMotionPoints())
+        {
+            mesh.movePoints(map().preMotionPoints());
+        }
+
+        if (overwrite)
+        {
+            mesh.setInstance(oldInstance);
+        }
+
+        Info<< "Writing mesh to time " << runTime.timeName() << endl;
+        mesh.write();
+    }
+
+    if (splitPatchIDs.size())
+    {
+        Info<< "Topologically splitting duplicate surfaces"
+            << ", i.e. duplicating points internal to duplicate surfaces"
+            << nl << endl;
+
+        // Determine points on split patches
+        DynamicList<label> candidates;
+        {
+            label sz = 0;
+            forAll(splitPatchIDs, i)
+            {
+                sz += patches[splitPatchIDs[i]].nPoints();
+            }
+            candidates.setCapacity(sz);
+
+            PackedBoolList isCandidate(mesh.nPoints());
+            forAll(splitPatchIDs, i)
+            {
+                const labelList& mp = patches[splitPatchIDs[i]].meshPoints();
+                forAll(mp, mpi)
+                {
+                    label pointi = mp[mpi];
+                    if (isCandidate.set(pointi))
+                    {
+                        candidates.append(pointi);
+                    }
+                }
+            }
+        }
+
+        // Analyse which points need to be duplicated
+        localPointRegion regionSide(mesh, candidates);
+
+        // Point duplication engine
+        duplicatePoints pointDuplicator(mesh);
+
+        // Mesh change engine
+        polyTopoChange meshMod(mesh);
+
+        // Insert topo changes
+        pointDuplicator.setRefinement(regionSide, meshMod);
+
+        if (!overwrite)
+        {
+            runTime++;
+        }
+
+        // Change the mesh. No inflation.
+        autoPtr<mapPolyMesh> map = meshMod.changeMesh(mesh, false);
+
+        // Update fields
+        mesh.updateMesh(map);
+
+        // Move mesh (since morphing does not do this)
+        if (map().hasMotionPoints())
+        {
+            mesh.movePoints(map().preMotionPoints());
+        }
+
+        if (overwrite)
+        {
+            mesh.setInstance(oldInstance);
+        }
+
+        Info<< "Writing mesh to time " << runTime.timeName() << endl;
+        mesh.write();
+
+        // Dump duplicated points (if any)
         const labelList& pointMap = map().pointMap();
         labelList nDupPerPoint(map().nOldPoints(), 0);
@@ -385,7 +558,7 @@ int main(int argc, char *argv[])
             }
         }
 
-        Pout<< "Writing " << dupPoints.size()
+        Info<< "Writing " << returnReduce(dupPoints.size(), sumOp<label>())
            << " duplicated points to pointSet "
            << dupPoints.objectPath() << nl << endl;
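Taken together, the changes to this utility give three modes of operation: patch-wide control from the command line, or per-patch control from a dictionary via the new -dict option; combining -dict with -split or -detectOnly now aborts with a FatalError. A hedged sketch of the resulting invocations (run from a case directory; the dictionary path is illustrative):

    # Sketch only: the three modes after this change.
    mergeOrSplitBaffles -detectOnly                             # write duplicate faces to a faceSet only
    mergeOrSplitBaffles -split -overwrite                       # duplicate shared points on all patches
    mergeOrSplitBaffles -dict system/mergeOrSplitBafflesDict    # per-patch detect/merge/split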

View File

@@ -0,0 +1,39 @@
/*--------------------------------*- C++ -*----------------------------------*\
| =========                 |                                                 |
| \\      /  F ield         | OpenFOAM: The Open Source CFD Toolbox           |
|  \\    /   O peration     | Version:  plus                                  |
|   \\  /    A nd           | Web:      www.OpenFOAM.com                      |
|    \\/     M anipulation  |                                                 |
\*---------------------------------------------------------------------------*/
FoamFile
{
version 2.0;
format ascii;
class dictionary;
object mergeOrSplitBafflesDict;
}
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
// Detect baffles (boundary faces sharing points) on selected set of patches
// and write to a faceSet.
detect
{
patches (".*Wall");
}
// Detect baffles (on selected patches) and merge these into internal faces.
merge
{
patches ("mergePatch");
}
// Detect baffles (on selected patches) and duplicate the points. This is
// used if e.g. the two sides need to move separately. Note that since the
// points are duplicated the two faces are no longer baffles.
split
{
patches ("split.*Patches");
}
// ************************************************************************* //
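As a usage sketch (not part of the commit), a dictionary like the one above would typically live in a case's system/ directory and be picked up through the -dict option documented earlier; the case path below is an assumption:

    # Sketch only: drive mergeOrSplitBaffles from the example dictionary.
    cp mergeOrSplitBafflesDict "$CASE_DIR"/system/      # $CASE_DIR is a placeholder
    cd "$CASE_DIR"
    mergeOrSplitBaffles -dict system/mergeOrSplitBafflesDict -overwrite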

View File

@@ -347,7 +347,7 @@ int main(int argc, char *argv[])
                 runTime.time().system(),
                 regionDir,          // use region if non-standard
                 runTime,
-                IOobject::MUST_READ_IF_MODIFIED,
+                IOobject::MUST_READ,
                 IOobject::NO_WRITE,
                 false
             ),

View File

@@ -69,7 +69,7 @@ int readNumProcs
             dictName,
             runTime.system(),
             runTime,
-            IOobject::MUST_READ_IF_MODIFIED,
+            IOobject::MUST_READ,
             IOobject::NO_WRITE,
             false
         ),

View File

@@ -161,7 +161,10 @@ int main(int argc, char *argv[])
         fileName decompDictFile;
         args.optionReadIfPresent("decomposeParDict", decompDictFile);
 
-        IOdictionary* dict = new IOdictionary
+        // A demand-driven decompositionMethod can have issues finding
+        // an alternative decomposeParDict location.
+        IOdictionary* dictPtr = new IOdictionary
         (
             decompositionModel::selectIO
             (
@@ -170,18 +173,18 @@ int main(int argc, char *argv[])
                     "decomposeParDict",
                     runTime.system(),
                     runTime,
-                    IOobject::MUST_READ_IF_MODIFIED,
+                    IOobject::MUST_READ,
                     IOobject::NO_WRITE
                 ),
                 decompDictFile
             )
         );
 
-        // The object must have the expected "decomposeParDict" name.
-        // This also implies that it cannot be changed during the run.
-        dict->rename("decomposeParDict");
+        // Store it on the object registry, but to be found it must also
+        // have the expected "decomposeParDict" name.
+        dictPtr->rename("decomposeParDict");
 
-        runTime.store(dict);
+        runTime.store(dictPtr);
     }
 
     // Determine mesh bounding boxes:

View File

@@ -56,15 +56,17 @@ isTest()
 #
 getNumberOfProcessors()
 {
+    local dict="${1:-system/decomposeParDict}"
+
     # Re-use positional parameters for automatic whitespace elimination
-    set -- $(foamDictionary -entry numberOfSubdomains -value "${1:-system/decomposeParDict}")
+    set -- $(foamDictionary -entry numberOfSubdomains -value "$dict" 2>/dev/null)
 
     if [ "$#" -eq 1 ]
     then
         echo "$1"
     else
-        echo "Error retrieving 'numberOfSubdomains' from decomposeParDict" 1>&2
-        echo 1
+        echo "Warning no 'numberOfSubdomains' in '$dict'" 1>&2
+        echo 1      # serial as fallback
+        return 1
     fi
 }
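The revised helper now echoes 1 and returns a non-zero status when numberOfSubdomains cannot be read, so callers can tell a real value from the serial fallback. A small sketch of that behaviour (the dictionary path is an assumption):

    # Sketch only: query the processor count with the serial fallback.
    . "$WM_PROJECT_DIR/bin/tools/RunFunctions"

    nProcs=$(getNumberOfProcessors system/decomposeParDict.6) || \
        echo "numberOfSubdomains not found; falling back to serial" 1>&2
    echo "Case will use $nProcs processor(s)"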

View File

@@ -50,8 +50,8 @@
 #
 #------------------------------------------------------------------------------
 
-set boost_version=boost_1_61_0
-set cgal_version=CGAL-4.8
+set boost_version=boost_1_62_0
+set cgal_version=CGAL-4.9
 
 setenv BOOST_ARCH_PATH $WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$boost_version
 setenv CGAL_ARCH_PATH $WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$cgal_version

View File

@@ -22,7 +22,7 @@
 #     along with OpenFOAM.  If not, see <http://www.gnu.org/licenses/>.
 #
 # File
-#     etc/config.sh/FFTW
+#     etc/config.csh/FFTW
 #
 # Description
 #     Setup file for FFTW include/libraries.
@@ -48,7 +48,7 @@
 #
 #------------------------------------------------------------------------------
 
-set fftw_version=fftw-3.3.4
+set fftw_version=fftw-3.3.5
 
 setenv FFTW_ARCH_PATH $WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$fftw_version

View File

@@ -51,7 +51,7 @@ case SYSTEMOPENMPI:
     breaksw
 
 case OPENMPI:
-    setenv FOAM_MPI openmpi-1.10.2
+    setenv FOAM_MPI openmpi-1.10.4
 
     # Optional configuration tweaks:
     _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.csh/openmpi`

View File

@@ -51,7 +51,7 @@
 #
 #------------------------------------------------------------------------------
 
-setenv ParaView_VERSION 5.1.2
+setenv ParaView_VERSION 5.2.0
 setenv ParaView_MAJOR detect        # Automatically determine major version
 
 set cmake_version=cmake-system
@@ -63,12 +63,10 @@ if ( ! $?ParaView_DIR ) setenv ParaView_DIR
 set cleaned=`$WM_PROJECT_DIR/bin/foamCleanPath "$PATH" "$ParaView_DIR $WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/cmake- $WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/paraview-"`
 if ( $status == 0 ) setenv PATH $cleaned
 
-# Environment for ThirdParty cmake
-unsetenv CMAKE_HOME
+# ThirdParty cmake
 set cmake=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$cmake_version
-if ( -r $cmake ) then
-    setenv CMAKE_HOME $cmake
-    setenv PATH ${CMAKE_HOME}/bin:${PATH}
+if ( -r $cmake/bin/cmake ) then
+    _foamAddPath $cmake/bin
 endif
 
 # Evaluate command-line parameters for ParaView
@@ -112,9 +110,9 @@ if ( $?ParaView_VERSION ) then
     set pvLibDir=${ParaView_DIR}/lib/$pvMajor
     set pvPython=$ParaView_DIR/Utilities/VTKPythonWrapping
 
-    setenv PATH ${ParaView_DIR}/bin:${PATH}
     setenv ParaView_INCLUDE_DIR $ParaView_DIR/include/$pvMajor
     setenv PV_PLUGIN_PATH $FOAM_LIBBIN/$pvMajor
+    setenv PATH ${ParaView_DIR}/bin:${PATH}
     setenv LD_LIBRARY_PATH "${pvLibDir}:${LD_LIBRARY_PATH}"
 
     # Add in python libraries if required

View File

@@ -49,8 +49,8 @@
 # - the LD_LIBRARY_PATH is not adjusted.
 #------------------------------------------------------------------------------
 
-boost_version=boost_1_61_0
-cgal_version=CGAL-4.8
+boost_version=boost_1_62_0
+cgal_version=CGAL-4.9
 
 export BOOST_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$boost_version
 export CGAL_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$cgal_version

View File

@@ -47,7 +47,7 @@
 # - the LD_LIBRARY_PATH is not adjusted.
 #------------------------------------------------------------------------------
 
-fftw_version=fftw-3.3.4
+fftw_version=fftw-3.3.5
 
 export FFTW_ARCH_PATH=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$fftw_version

View File

@@ -54,7 +54,7 @@ SYSTEMOPENMPI)
     ;;
 
 OPENMPI)
-    export FOAM_MPI=openmpi-1.10.2
+    export FOAM_MPI=openmpi-1.10.4
 
     # Optional configuration tweaks:
     _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.sh/openmpi`

View File

@@ -51,7 +51,7 @@
 #
 #------------------------------------------------------------------------------
 
-ParaView_VERSION=5.1.2
+ParaView_VERSION=5.2.0
 ParaView_MAJOR=detect                   # Automatically determine major version
 
 cmake_version=cmake-system
@@ -66,14 +66,11 @@ cleaned=$($WM_PROJECT_DIR/bin/foamCleanPath "$PATH" \
     ) \
     && PATH="$cleaned"
 
-# Environment for ThirdParty cmake
-unset CMAKE_HOME
+# ThirdParty cmake
 cmake=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER/$cmake_version
-if [ -r $cmake ]
+if [ -r $cmake/bin/cmake ]
 then
-    export CMAKE_HOME=$cmake
-    export CMAKE_ROOT=$cmake
-    export PATH=$CMAKE_HOME/bin:$PATH
+    _foamAddPath $cmake/bin
 fi
 
 # Evaluate command-line parameters for ParaView
@@ -125,9 +122,9 @@ then
     pvLibDir=$ParaView_DIR/lib/$pvMajor
     pvPython=$ParaView_DIR/Utilities/VTKPythonWrapping
 
-    export PATH=$ParaView_DIR/bin:$PATH
     export ParaView_INCLUDE_DIR=$ParaView_DIR/include/$pvMajor
     export PV_PLUGIN_PATH=$FOAM_LIBBIN/$pvMajor
+    export PATH=$ParaView_DIR/bin:$PATH
     export LD_LIBRARY_PATH=$pvLibDir:$LD_LIBRARY_PATH
 
     # Add in python libraries if required

View File

@@ -847,7 +847,7 @@ Foam::distributedTriSurfaceMesh::independentlyDistributedBbs
                     "decomposeParDict",
                     searchableSurface::time().system(),
                     searchableSurface::time(),
-                    IOobject::MUST_READ_IF_MODIFIED,
+                    IOobject::MUST_READ,
                     IOobject::NO_WRITE
                 )
             )

View File

@@ -4,7 +4,10 @@ cd ${0%/*} || exit 1    # Run from this directory
 # Source tutorial run functions
 . $WM_PROJECT_DIR/bin/tools/RunFunctions
 
+# Alternative decomposeParDict name:
+decompDict="-decomposeParDict system/decomposeParDict.6"
+## Standard decomposeParDict name:
+# unset decompDict
 
 # copy motorbike surface from resources directory
 \cp $FOAM_TUTORIALS/resources/geometry/motorBike.obj.gz constant/triSurface/
@@ -13,6 +16,14 @@ runApplication surfaceFeatureExtract
 runApplication blockMesh
 
-runApplication decomposePar
-runParallel snappyHexMesh -overwrite
+runApplication decomposePar $decompDict
+
+# Using distributedTriSurfaceMesh?
+if foamDictionary -entry geometry -value system/snappyHexMeshDict | \
+   grep -q distributedTriSurfaceMesh
+then
+    runParallel $decompDict surfaceRedistributePar motorBike.obj independent
+fi
+
+runParallel $decompDict snappyHexMesh -overwrite
 
 #- For non-parallel running: - set the initial fields

View File

@@ -15,20 +15,20 @@ FoamFile
 // * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
 
-numberOfSubdomains 4;
+numberOfSubdomains 6;
 
 method          hierarchical;
 // method          ptscotch;
 
 simpleCoeffs
 {
-    n               (4 1 1);
+    n               (6 1 1);
     delta           0.001;
 }
 
 hierarchicalCoeffs
 {
-    n               (2 2 1);
+    n               (3 2 1);
     delta           0.001;
     order           xyz;
 }
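For the hierarchical method the product of the n components must equal numberOfSubdomains (3 x 2 x 1 = 6 here, which also matches the decomposeParDict.6 used by the Allrun above). A hedged check with foamDictionary, assuming this file is the one that script references:

    # Sketch only: confirm the subdomain count and the hierarchical split agree.
    foamDictionary -entry numberOfSubdomains -value system/decomposeParDict.6    # expect 6
    foamDictionary -entry hierarchicalCoeffs -value system/decomposeParDict.6    # n (3 2 1): 3*2*1 = 6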