ENH: additional MPI gather/scatter routines, globalIndex gather methods

- UPstream::mpiGather (MPI_Gather)   - used by Pstream::listGatherValues
- UPstream::mpiScatter (MPI_Scatter) - used by Pstream::listScatterValues

  These are much simpler forms for gather/scatter of fixed-sized,
  contiguous data types (eg, primitives, simple pairs etc).

  In the gather form, creates a list of gathered values on the master
  process. The subranks have a list size of zero.

  Similarly, scatter will distribute a list of values to single values
  on each process.

  Instead of

      labelList sendSizes(Pstream::nProcs());
      sendSizes[Pstream::myProcNo()] = sendData.size();
      Pstream::gatherList(sendSizes);

  Can write

      const labelList sendSizes
      (
          UPstream::listGatherValues<label>(sendData.size())
      );

  // Less code, lower overhead and list can be const.

  For scattering an individual value only,
  instead of

      labelList someValues;
      if (Pstream::master()) someValues = ...;

      Pstream::scatterList(someValues);
      const label localValue
      (
          someValues[Pstream::myProcNo()]
      );

  Can write

      labelList someValues;
      if (Pstream::master()) someValues = ...;

      Pstream::scatterList(someValues);
      const label localValue
      (
          UPstream::listScatterValues<label>(someValues)
      );

   Can of course also mix listGatherValues to assemble a list on master
   and use Pstream::scatterList to distribute.

ENH: adjusted globalIndex gather methods

- added mpiGather() method [contiguous data only] using MPI_Gatherv

- respect localSize if gathering master data to ensure that a
  request for 0 master elements is properly handled.
This commit is contained in:
Mark Olesen 2021-10-15 14:42:21 +02:00
parent 1f20747b1a
commit b6539cd02e
9 changed files with 748 additions and 95 deletions

View File

@ -0,0 +1,3 @@
Test-gatherValues1.C
EXE = $(FOAM_USER_APPBIN)/Test-gatherValues1

View File

@ -0,0 +1,2 @@
/* EXE_INC = */
/* EXE_LIBS = */

View File

@ -0,0 +1,189 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | www.openfoam.com
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
Application
Test-gatherValues1
Description
Test list gather functionality
\*---------------------------------------------------------------------------*/
#include "argList.H"
#include "Time.H"
#include "IPstream.H"
#include "OPstream.H"
#include "vector.H"
#include "IOstreams.H"
#include "Pstream.H"
#include "globalIndex.H"
using namespace Foam;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
int main(int argc, char *argv[])
{
    // Test application: may be run with any number of processor dirs
    argList::noCheckProcessorDirectories();

    #include "setRootCase.H"

    // Per-rank test data: each rank gets a list of different length
    // (2, 4, 6, ...) with rank-dependent starting values
    const labelList localValues
    (
        identity(2 *(Pstream::myProcNo()+1), -5*Pstream::myProcNo())
    );

    // Test resize
    {
        globalIndex globIdx(localValues.size());
        Info<< "globIdx = " << flatOutput(globIdx.offsets()) << nl;

        globIdx.setLocalSize(4, 0);
        Info<< "globIdx = " << flatOutput(globIdx.offsets()) << nl;

        globIdx.setLocalSize(3, 0);
        Info<< "globIdx = " << flatOutput(globIdx.offsets()) << nl;
    }

    // Gather all values
    {
        const auto& sendData = localValues;

        // One-sided sizing! master only
        const globalIndex allProcAddr
        (
            UPstream::listGatherValues<label>(sendData.size()),
            globalIndex::SIZES
        );

        Pout<< "listGather sizes: " << flatOutput(allProcAddr.sizes()) << nl;

        // Collect all values
        labelList allValues
        (
            allProcAddr.mpiGather(sendData)
        );

        Pout<< "all-data: " << allValues << endl;
    }

    // Gather with a zero-sized contribution from the master rank
    {
        const labelList::subList& sendData =
        (
            Pstream::master()
          ? SubList<label>(localValues, 0) // exclude
          : SubList<label>(localValues)
        );

        // Gather the per-rank send sizes (list on master only)
        const labelList sendSizes
        (
            UPstream::listGatherValues<label>(sendData.size())
        );

        // Scatter back: each rank receives its own size entry
        const label sendSize
        (
            UPstream::listScatterValues<label>(sendSizes)
        );

        const globalIndex subProcAddr(sendSizes, globalIndex::SIZES);

        Pout<< "listGather "
            << localValues.size() << " = " << flatOutput(sendSizes)
            << " offsets " << flatOutput(subProcAddr.offsets())
            << nl;

        label newLocalValue = 5 + UPstream::listScatterValues(sendSizes);

        Pout<< "listScattered: " << newLocalValue << nl;

        // Can also scatter a longer list
        Pout<< "listScatter off "
            << UPstream::listScatterValues(subProcAddr.offsets()) << nl;

        Pout<< endl << "local list [" << Pstream::myProcNo() << "] "
            << flatOutput(localValues) << nl;

        Pout<< endl << "local send [" << Pstream::myProcNo() << "] "
            << sendSize << nl;

        // Collect all off-processor values
        labelList allValues
        (
            subProcAddr.mpiGather(sendData)
        );

        Pout<< "off-proc: " << allValues << endl;

        if (Pstream::master())
        {
            Info<< "master: " << flatOutput(localValues) << nl;

            // Walk the per-rank slices of the gathered data
            label proci = 0;
            for (const labelRange& range : subProcAddr)
            {
                Info<< proci << ": " << flatOutput(allValues.slice(range)) << nl;
                ++proci;
            }

            Info<< nl << "verify ranges" << nl;

            // Default-constructed: no offsets, loop body never entered
            {
                globalIndex glob;
                Info<< "empty:" << nl;
                for (const labelRange& range : glob)
                {
                    Info<< " range: " << range << endl;
                }
            }
            // Single offset entry: degenerate (no complete size pair)
            {
                globalIndex glob(labelList(Foam::one{}, 0), globalIndex::OFFSETS);
                Info<< "degenerate:" << nl;
                for (const labelRange& range : glob)
                {
                    Info<< " range: " << range << endl;
                }
            }
            // Single (zero) size entry: one empty range
            {
                globalIndex glob(labelList(Foam::one{}, 0), globalIndex::SIZES);
                Info<< "single:" << nl;
                for (const labelRange& range : glob)
                {
                    Info<< " range: " << range << endl;
                }
            }
        }
    }

    Info<< "\nEnd\n" << endl;

    return 0;
}
// ************************************************************************* //

View File

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2017 OpenFOAM Foundation
Copyright (C) 2015-2020 OpenCFD Ltd.
Copyright (C) 2015-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -33,8 +33,9 @@ Description
SourceFiles
UPstream.C
UPstreamCommsStruct.C
gatherScatter.C
UPstreamTemplates.C
combineGatherScatter.C
gatherScatter.C
gatherScatterList.C
\*---------------------------------------------------------------------------*/
@ -595,6 +596,28 @@ public:
const label communicator = worldComm
);
//- Receive data from all processors on the master (low-level)
static void mpiGather
(
const char* sendData,
int sendSize,
char* recvData,
int recvSize,
const label communicator = worldComm
);
//- Send data to all processors from master (low-level)
static void mpiScatter
(
const char* sendData,
int sendSize,
char* recvData,
int recvSize,
const label communicator = worldComm
);
//- Receive data from all processors on the master
static void gather
(
@ -620,6 +643,27 @@ public:
);
// Gather single, contiguous value(s)
//- Individual values into list locations.
// On master list length == nProcs, otherwise zero length
template<class T>
static List<T> listGatherValues
(
const T& localValue,
const label communicator = worldComm
);
//- Individual values into list locations.
// On master list length == nProcs, otherwise zero length
template<class T>
static T listScatterValues
(
const UList<T>& allValues,
const label communicator = worldComm
);
// Housekeeping
//- Process index of first sub-process
@ -656,6 +700,12 @@ UList<UPstream::commsStruct>::operator[](const label) const;
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#ifdef NoRepository
#include "UPstreamTemplates.C"
#endif
// * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * //
#endif
// ************************************************************************* //

View File

@ -0,0 +1,132 @@
/*---------------------------------------------------------------------------*\
========= |
\\ / F ield | OpenFOAM: The Open Source CFD Toolbox
\\ / O peration |
\\ / A nd | www.openfoam.com
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
OpenFOAM is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenFOAM is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License
along with OpenFOAM. If not, see <http://www.gnu.org/licenses/>.
\*---------------------------------------------------------------------------*/
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
template<class T>
Foam::List<T> Foam::UPstream::listGatherValues
(
    const T& localValue,
    const label comm
)
{
    // Gather one fixed-size value per rank onto the master.
    // Only possible for contiguous (bitwise-copyable) types.
    if (!is_contiguous<T>::value)
    {
        FatalErrorInFunction
            << "Cannot gather values for non-contiguous types" << endl
            << Foam::abort(FatalError);
    }

    const label numProc = (UPstream::parRun() ? UPstream::nProcs(comm) : 1);

    if (numProc <= 1)
    {
        // Non-parallel: the result is simply our own value
        List<T> allValues(1);
        allValues[0] = localValue;
        return allValues;
    }

    // Parallel: master receives numProc values, sub-ranks receive none
    List<T> allValues;
    if (UPstream::master(comm))
    {
        allValues.resize(numProc);
    }

    UPstream::mpiGather
    (
        reinterpret_cast<const char*>(&localValue),
        sizeof(T),
        allValues.data_bytes(),
        sizeof(T),
        comm
    );

    return allValues;
}
template<class T>
T Foam::UPstream::listScatterValues
(
const UList<T>& allValues,
const label comm
)
{
if (!is_contiguous<T>::value)
{
FatalErrorInFunction
<< "Cannot scatter values for non-contiguous types" << endl
<< Foam::abort(FatalError);
}
const label nproc = (UPstream::parRun() ? UPstream::nProcs(comm) : 1);
T localValue;
if (nproc > 1)
{
if (UPstream::master(comm) && allValues.size() < nproc)
{
FatalErrorInFunction
<< "Attempting to send " << allValues.size()
<< " values to " << nproc << " processors" << endl
<< Foam::abort(FatalError);
}
UPstream::mpiScatter
(
allValues.cdata_bytes(),
sizeof(T),
reinterpret_cast<char*>(&localValue),
sizeof(T),
comm
);
}
else
{
// non-parallel: return local value
if (allValues.empty()) // Extra safety
{
localValue = Zero;
}
else
{
localValue = allValues[0];
}
}
return localValue;
}
// ************************************************************************* //

View File

@ -375,16 +375,18 @@ public:
);
// Other
// Other
// Gather
//- Collect data in processor order on master (== procIDs[0]).
// Offsets needed on master only.
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
static void gather
(
const labelUList& offsets,
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
const UList<Type>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
@ -397,7 +399,7 @@ public:
static void gather
(
const labelUList& offsets,
const label comm,
const label comm, //!< communicator
const UList<int>& procIDs,
const IndirectListBase<Type, Addr>& fld,
List<Type>& allFld,
@ -407,11 +409,11 @@ public:
//- Collect data in processor order on master (== procIDs[0]).
// Offsets needed on master only.
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void gather
(
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
const UList<Type>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
@ -430,7 +432,8 @@ public:
const UList<Type>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Collect data indirectly in processor order on master.
@ -441,29 +444,18 @@ public:
const IndirectListBase<Type, Addr>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::scheduled
const Pstream::commsTypes = Pstream::commsTypes::scheduled,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Collect data in processor order on master.
// Does communication with default communicator and message tag.
template<class Type>
static void gatherOp
(
const UList<Type>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
);
//- Inplace collect in processor order on master (== procIDs[0]).
//- Needs offsets only on master.
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
static void gather
(
const labelUList& offsets,
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
List<Type>& fld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
@ -471,11 +463,11 @@ public:
//- Inplace collect in processor order on master (== procIDs[0]).
//- Needs offsets only on master.
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void gather
(
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
List<Type>& fld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes commsType =
@ -493,9 +485,43 @@ public:
(
List<Type>& fld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Collect \em contiguous data using a MPI_Gatherv call
// \attention The nProcs for globalIndex and communicator
// must match!!
template<class Type, class OutputContainer = List<Type>>
void mpiGather
(
const UList<Type>& sendData,
OutputContainer& allValues,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Collect \em contiguous data using a MPI_Gatherv call
// \attention The nProcs for globalIndex and communicator
// must match!!
template<class Type, class OutputContainer = List<Type>>
OutputContainer mpiGather
(
const UList<Type>& sendData,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Collect data in processor order on master.
// Does communication with default communicator and message tag.
template<class Type>
static void gatherOp
(
const UList<Type>& fld,
List<Type>& allFld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
);
//- Inplace collect data in processor order on master
// Does communication with default communicator and message tag.
// After the gather, the field is zero-sized on the slaves.
@ -508,13 +534,15 @@ public:
);
// Scatter
//- Distribute data in processor order. Requires fld to be sized!
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
static void scatter
(
const labelUList& offsets,
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
const UList<Type>& allFld,
UList<Type>& fld,
const int tag = UPstream::msgType(),
@ -522,11 +550,11 @@ public:
);
//- Distribute data in processor order. Requires fld to be sized!
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void scatter
(
const label comm,
const Container& procIDs,
const label comm, //!< communicator
const ProcIDsContainer& procIDs,
const UList<Type>& allFld,
UList<Type>& fld,
const int tag = UPstream::msgType(),
@ -545,18 +573,22 @@ public:
const UList<Type>& allFld,
UList<Type>& fld,
const int tag = UPstream::msgType(),
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking
const Pstream::commsTypes = Pstream::commsTypes::nonBlocking,
const label comm = UPstream::worldComm //!< communicator
) const;
//- Get (potentially remote) data. Elements required given as
// global indices
// Scatter
//- Get (potentially remote) data.
//- Elements required given as global indices
template<class Type, class CombineOp>
void get
(
List<Type>& allFld,
const labelUList& globalIds,
const CombineOp& cop,
const label comm = Pstream::worldComm,
const label comm = UPstream::worldComm, //!< communicator
const int tag = UPstream::msgType()
) const;

View File

@ -70,12 +70,12 @@ Foam::globalIndex::calcListOffsets
// * * * * * * * * * * * * * * * Member Functions * * * * * * * * * * * * * //
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void Foam::globalIndex::gather
(
const labelUList& off,
const label comm,
const Container& procIDs,
const ProcIDsContainer& procIDs,
const UList<Type>& fld,
List<Type>& allFld,
const int tag,
@ -96,10 +96,16 @@ void Foam::globalIndex::gather
if (Pstream::myProcNo(comm) == procIDs[0])
{
allFld.resize(off.last());
allFld.resize_nocopy(off.last());
// Assign my local data
SubList<Type>(allFld, fld.size(), 0) = fld;
// Assign my local data - respect offset information
// so that we can request 0 entries to be copied
SubList<Type> localSlot(allFld, off[1]-off[0], off[0]);
if (!localSlot.empty())
{
localSlot = fld;
}
if
(
@ -242,10 +248,16 @@ void Foam::globalIndex::gather
if (Pstream::myProcNo(comm) == procIDs[0])
{
allFld.resize(off.last());
allFld.resize_nocopy(off.last());
// Assign my local data
SubList<Type>(allFld, fld.size(), 0) = fld;
// Assign my local data - respect offset information
// so that we can request 0 entries to be copied
SubList<Type> localSlot(allFld, off[1]-off[0], off[0]);
if (!localSlot.empty())
{
localSlot = fld;
}
// Already verified commsType != nonBlocking
for (label i = 1; i < procIDs.size(); ++i)
@ -284,13 +296,14 @@ void Foam::globalIndex::gather
const UList<Type>& fld,
List<Type>& allFld,
const int tag,
const Pstream::commsTypes commsType
const Pstream::commsTypes commsType,
const label comm
) const
{
gather
(
UPstream::worldComm,
UPstream::procID(UPstream::worldComm),
comm,
UPstream::procID(comm),
fld,
allFld,
tag,
@ -305,14 +318,15 @@ void Foam::globalIndex::gather
const IndirectListBase<Type, Addr>& fld,
List<Type>& allFld,
const int tag,
const Pstream::commsTypes commsType
const Pstream::commsTypes commsType,
const label comm
) const
{
gather
(
offsets_,
UPstream::worldComm,
UPstream::procID(UPstream::worldComm),
comm,
UPstream::procID(comm),
fld,
allFld,
tag,
@ -321,25 +335,12 @@ void Foam::globalIndex::gather
}
template<class Type>
void Foam::globalIndex::gatherOp
(
const UList<Type>& fld,
List<Type>& allFld,
const int tag,
const Pstream::commsTypes commsType
)
{
globalIndex(fld.size()).gather(fld, allFld, tag, commsType);
}
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void Foam::globalIndex::gather
(
const labelUList& off,
const label comm,
const Container& procIDs,
const ProcIDsContainer& procIDs,
List<Type>& fld,
const int tag,
const Pstream::commsTypes commsType
@ -361,22 +362,23 @@ void Foam::globalIndex::gather
(
List<Type>& fld,
const int tag,
const Pstream::commsTypes commsType
const Pstream::commsTypes commsType,
const label comm
) const
{
List<Type> allFld;
gather
(
UPstream::worldComm,
UPstream::procID(UPstream::worldComm),
comm,
UPstream::procID(comm),
fld,
allFld,
tag,
commsType
);
if (Pstream::master(UPstream::worldComm))
if (Pstream::master(comm))
{
fld.transfer(allFld);
}
@ -387,6 +389,98 @@ void Foam::globalIndex::gather
}
template<class Type, class OutputContainer>
void Foam::globalIndex::mpiGather
(
const UList<Type>& sendData,
OutputContainer& allValues,
const label comm
) const
{
if (!is_contiguous<Type>::value)
{
FatalErrorInFunction
<< "Cannot be called for non-contiguous data" << nl
<< abort(FatalError);
}
const label proci = Pstream::myProcNo(comm);
const globalIndex& globalAddr = *this;
// Must be the same as Pstream::nProcs(comm), at least on master!!
const label nproc = globalAddr.nProcs();
auto nSendBytes = sendData.size_bytes();
// Respect local size information so that we can request
// 0 entries to be sent on master
if (proci < nproc && !globalAddr.localSize(proci))
{
nSendBytes = 0;
}
List<int> recvSizes;
List<int> recvOffsets;
if (Pstream::master(comm))
{
allValues.resize_nocopy(globalAddr.size());
recvSizes.resize(nproc);
recvOffsets.resize(nproc+1);
for (label proci = 0; proci < nproc; ++proci)
{
recvSizes[proci] = globalAddr.localSize(proci) * sizeof(Type);
recvOffsets[proci] = globalAddr.localStart(proci) * sizeof(Type);
}
recvOffsets[nproc] = globalAddr.size() * sizeof(Type);
}
else
{
allValues.clear();
}
UPstream::gather
(
sendData.cdata_bytes(),
nSendBytes,
allValues.data_bytes(),
recvSizes,
recvOffsets,
comm
);
}
template<class Type, class OutputContainer>
OutputContainer Foam::globalIndex::mpiGather
(
const UList<Type>& sendData,
const label comm
) const
{
OutputContainer allValues;
mpiGather<Type, OutputContainer>(sendData, allValues, comm);
return allValues;
}
template<class Type>
void Foam::globalIndex::gatherOp
(
const UList<Type>& fld,
List<Type>& allFld,
const int tag,
const Pstream::commsTypes commsType
)
{
globalIndex(fld.size()).gather(fld, allFld, tag, commsType);
}
template<class Type>
void Foam::globalIndex::gatherOp
(
@ -399,12 +493,12 @@ void Foam::globalIndex::gatherOp
}
template<class Container, class Type>
template<class ProcIDsContainer, class Type>
void Foam::globalIndex::scatter
(
const labelUList& off,
const label comm,
const Container& procIDs,
const ProcIDsContainer& procIDs,
const UList<Type>& allFld,
UList<Type>& fld,
const int tag,
@ -425,7 +519,12 @@ void Foam::globalIndex::scatter
if (Pstream::myProcNo(comm) == procIDs[0])
{
fld.deepCopy(SubList<Type>(allFld, off[1]-off[0]));
const SubList<Type> localSlot(allFld, off[1]-off[0], off[0]);
if (!localSlot.empty())
{
fld.deepCopy(localSlot);
}
if
(
@ -552,14 +651,15 @@ void Foam::globalIndex::scatter
const UList<Type>& allFld,
UList<Type>& fld,
const int tag,
const Pstream::commsTypes commsType
const Pstream::commsTypes commsType,
const label comm
) const
{
scatter
(
offsets_,
UPstream::worldComm,
UPstream::procID(UPstream::worldComm),
comm,
UPstream::procID(comm),
allFld,
fld,
tag,
@ -578,7 +678,7 @@ void Foam::globalIndex::get
const int tag
) const
{
allFld.resize(globalIds.size());
allFld.resize_nocopy(globalIds.size());
if (globalIds.size())
{
// Sort according to processor

View File

@ -6,7 +6,7 @@
\\/ M anipulation |
-------------------------------------------------------------------------------
Copyright (C) 2011-2018 OpenFOAM Foundation
Copyright (C) 2016-2020 OpenCFD Ltd.
Copyright (C) 2016-2021 OpenCFD Ltd.
-------------------------------------------------------------------------------
License
This file is part of OpenFOAM.
@ -179,6 +179,34 @@ void Foam::UPstream::allToAll
}
//- Non-parallel (dummy) variant: a single-rank "gather" degenerates
//  to a straight copy of the send buffer
void Foam::UPstream::mpiGather
(
    const char* sendData,
    int sendSize,
    char* recvData,
    int recvSize,
    const label communicator
)
{
    // Single process: the master's contribution is its own data
    std::memmove(recvData, sendData, sendSize);
}
//- Non-parallel (dummy) variant: a single-rank "scatter" degenerates
//  to a straight copy into the receive buffer
void Foam::UPstream::mpiScatter
(
    const char* sendData,
    int sendSize,
    char* recvData,
    int recvSize,
    const label communicator
)
{
    // Single process: copy the amount the receiver expects.
    // Use recvSize (not sendSize) for consistency with UPstream::scatter
    // and the non-parRun MPI code path; the two are equal here anyway.
    std::memmove(recvData, sendData, recvSize);
}
void Foam::UPstream::gather
(
const char* sendData,
@ -190,7 +218,7 @@ void Foam::UPstream::gather
const label communicator
)
{
memmove(recvData, sendData, sendSize);
std::memmove(recvData, sendData, sendSize);
}
@ -205,7 +233,7 @@ void Foam::UPstream::scatter
const label communicator
)
{
memmove(recvData, sendData, recvSize);
std::memmove(recvData, sendData, recvSize);
}

View File

@ -805,7 +805,7 @@ void Foam::UPstream::allToAll
if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
{
Pout<< "** allToAll :"
Pout<< "** MPI_Alltoallv :"
<< " sendSizes:" << sendSizes
<< " sendOffsets:" << sendOffsets
<< " with comm:" << communicator
@ -852,12 +852,12 @@ void Foam::UPstream::allToAll
MPI_Alltoallv
(
const_cast<char*>(sendData),
const_cast<int*>(sendSizes.begin()),
const_cast<int*>(sendOffsets.begin()),
const_cast<int*>(sendSizes.cdata()),
const_cast<int*>(sendOffsets.cdata()),
MPI_BYTE,
recvData,
const_cast<int*>(recvSizes.begin()),
const_cast<int*>(recvOffsets.begin()),
const_cast<int*>(recvSizes.cdata()),
const_cast<int*>(recvOffsets.cdata()),
MPI_BYTE,
PstreamGlobals::MPICommunicators_[communicator]
)
@ -875,6 +875,122 @@ void Foam::UPstream::allToAll
}
//- Fixed-size gather of raw bytes onto the master (rank 0) via MPI_Gather
void Foam::UPstream::mpiGather
(
    const char* sendData,
    int sendSize,
    char* recvData,
    int recvSize,
    const label communicator
)
{
    const label np = nProcs(communicator);

    // Debug tracing when a specific communicator is being watched
    if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
    {
        Pout<< "** MPI_Gather :"
            << " np:" << np
            << " recvSize:" << recvSize
            << " with comm:" << communicator
            << " warnComm:" << UPstream::warnComm
            << endl;
        error::printStack(Pout);
    }

    if (!UPstream::parRun())
    {
        // Not parallel: plain local copy instead of an MPI call
        std::memmove(recvData, sendData, recvSize);
    }
    else
    {
        profilingPstream::beginTiming();

        // Non-zero MPI return code indicates failure
        if
        (
            MPI_Gather
            (
                const_cast<char*>(sendData),
                sendSize,
                MPI_BYTE,
                recvData,
                recvSize,
                MPI_BYTE,
                0,
                MPI_Comm(PstreamGlobals::MPICommunicators_[communicator])
            )
        )
        {
            FatalErrorInFunction
                << "MPI_Gather failed for sendSize " << sendSize
                << " recvSize " << recvSize
                << " communicator " << communicator
                << Foam::abort(FatalError);
        }

        profilingPstream::addGatherTime();
    }
}
//- Fixed-size scatter of raw bytes from the master (rank 0) via MPI_Scatter
void Foam::UPstream::mpiScatter
(
    const char* sendData,
    int sendSize,
    char* recvData,
    int recvSize,
    const label communicator
)
{
    const label np = nProcs(communicator);

    // Debug tracing when a specific communicator is being watched
    if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
    {
        Pout<< "** MPI_Scatter :"
            << " np:" << np
            << " recvSize:" << recvSize
            << " with comm:" << communicator
            << " warnComm:" << UPstream::warnComm
            << endl;
        error::printStack(Pout);
    }

    if (!UPstream::parRun())
    {
        // Not parallel: plain local copy instead of an MPI call
        std::memmove(recvData, sendData, recvSize);
    }
    else
    {
        profilingPstream::beginTiming();

        // Non-zero MPI return code indicates failure
        if
        (
            MPI_Scatter
            (
                const_cast<char*>(sendData),
                sendSize,
                MPI_BYTE,
                recvData,
                recvSize,
                MPI_BYTE,
                0,
                MPI_Comm(PstreamGlobals::MPICommunicators_[communicator])
            )
        )
        {
            FatalErrorInFunction
                << "MPI_Scatter failed for sendSize " << sendSize
                << " recvSize " << recvSize
                << " communicator " << communicator
                << Foam::abort(FatalError);
        }

        profilingPstream::addScatterTime();
    }
}
void Foam::UPstream::gather
(
const char* sendData,
@ -890,7 +1006,7 @@ void Foam::UPstream::gather
if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
{
Pout<< "** allToAll :"
Pout<< "** MPI_Gatherv :"
<< " np:" << np
<< " recvSizes:" << recvSizes
<< " recvOffsets:" << recvOffsets
@ -919,6 +1035,7 @@ void Foam::UPstream::gather
if (!UPstream::parRun())
{
// recvSizes[0] may be invalid - use sendSize instead
std::memmove(recvData, sendData, sendSize);
}
else
@ -933,8 +1050,8 @@ void Foam::UPstream::gather
sendSize,
MPI_BYTE,
recvData,
const_cast<int*>(recvSizes.begin()),
const_cast<int*>(recvOffsets.begin()),
const_cast<int*>(recvSizes.cdata()),
const_cast<int*>(recvOffsets.cdata()),
MPI_BYTE,
0,
MPI_Comm(PstreamGlobals::MPICommunicators_[communicator])
@ -968,7 +1085,7 @@ void Foam::UPstream::scatter
if (UPstream::warnComm != -1 && communicator != UPstream::warnComm)
{
Pout<< "** allToAll :"
Pout<< "** MPI_Scatterv :"
<< " np:" << np
<< " sendSizes:" << sendSizes
<< " sendOffsets:" << sendOffsets
@ -1005,8 +1122,8 @@ void Foam::UPstream::scatter
MPI_Scatterv
(
const_cast<char*>(sendData),
const_cast<int*>(sendSizes.begin()),
const_cast<int*>(sendOffsets.begin()),
const_cast<int*>(sendSizes.cdata()),
const_cast<int*>(sendOffsets.cdata()),
MPI_BYTE,
recvData,
recvSize,