#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
#  include "Kokkos_Core.hpp"
#endif // HAVE_TEUCHOSCORE_KOKKOSCORE

#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
std::vector<std::string> GlobalMPISession::argvCopy_;
#endif // HAVE_TEUCHOSCORE_KOKKOSCORE

GlobalMPISession::GlobalMPISession( int* argc, char*** argv, std::ostream *out )
{
  // Collect all startup output here first and print it in one shot, to
  // avoid jumbled parallel output between processes.
  std::ostringstream oss;
#ifdef HAVE_MPI
  int mpierr = 0;

  // Make sure MPI has not already been initialized.
  int mpiHasBeenStarted = 0;
  MPI_Initialized(&mpiHasBeenStarted);
  if (mpiHasBeenStarted) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Initialized() returned true,"
        << " calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  // Initialize MPI.
  mpierr = ::MPI_Init(argc, (char ***) argv);
  if (mpierr != 0) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Init() returned error code="
        << mpierr << "!=0, calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  initialize(out); // Set rank_ and nProc_.

  int nameLen;
  char procName[MPI_MAX_PROCESSOR_NAME];
  mpierr = ::MPI_Get_processor_name(procName, &nameLen);
  if (mpierr != 0) {
    if (out) {
      *out << "GlobalMPISession(): Error, MPI_Get_processor_name() error code="
        << mpierr << "!=0, calling std::terminate()!\n" << std::flush;
    }
    std::terminate();
  }

  oss << "Teuchos::GlobalMPISession::GlobalMPISession(): started processor with name "
    << procName << " and rank " << rank_ << "!" << std::endl;
#else

  oss << "Teuchos::GlobalMPISession::GlobalMPISession(): started serial run"
    << std::endl;

#endif // HAVE_MPI

#ifndef TEUCHOS_SUPPRESS_PROC_STARTUP_BANNER
  // Check whether the user wants to suppress the startup banner.
  bool printStartupBanner = true;
  const std::string suppress_option("--teuchos-suppress-startup-banner");
  for ( int opt_i = 0; opt_i < *argc; ++opt_i ) {
    if ( suppress_option == (*argv)[opt_i] ) {
      // Suppress the banner and strip the option from the argument list.
      printStartupBanner = false;
      for ( int i = opt_i; i < *argc; ++i )
        (*argv)[i] = (*argv)[i+1];
      --(*argc);
    }
  }
  if (out && printStartupBanner) {
    *out << oss.str() << std::flush;
  }
#endif // TEUCHOS_SUPPRESS_PROC_STARTUP_BANNER
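
  // Usage note (illustrative; "app.exe" is a hypothetical name, not from
  // this file): launching with
  //
  //   mpirun -np 4 ./app.exe --teuchos-suppress-startup-banner
  //
  // suppresses the banner, and the loop above strips the flag from argv
  // before the application parses its own options.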
#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
  // Save a deep copy of the command-line arguments, so that Kokkos can
  // later be initialized with the user's command-line options even though
  // Kokkos is not initialized here.
  const int numArgs = *argc;
  argvCopy_.resize (numArgs);
  for (int c = 0; c < numArgs; ++c) {
    argvCopy_[c] = std::string ((*argv)[c]); // deep copy
  }
#endif // HAVE_TEUCHOSCORE_KOKKOSCORE
}

#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
std::vector<std::string> GlobalMPISession::getArgv ()
{
  return argvCopy_;
}
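
// Illustrative sketch (an assumption, not code from this file): an
// application that defers Kokkos setup could feed the saved arguments
// back into Kokkos::initialize(int&, char*[]):
//
//   std::vector<std::string> args = Teuchos::GlobalMPISession::getArgv();
//   std::vector<char*> argvPtrs;
//   for (auto& s : args) argvPtrs.push_back(const_cast<char*>(s.c_str()));
//   int kokkosArgc = static_cast<int>(argvPtrs.size());
//   Kokkos::initialize(kokkosArgc, argvPtrs.data());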
#endif // HAVE_TEUCHOSCORE_KOKKOSCORE

GlobalMPISession::~GlobalMPISession()
{
  haveMPIState_ = false;
#ifdef HAVE_TEUCHOSCORE_KOKKOSCORE
  try {
    Kokkos::finalize_all();
  } catch (const std::runtime_error& e) {
    std::cerr << "Kokkos::finalize_all failed:\n" << e.what() << "\n";
  }
#endif
#ifdef HAVE_MPI
  const int mpierr = ::MPI_Finalize();
  mpiIsFinalized_ = (mpierr == 0);
  if (mpierr != 0)
    std::cerr << "Error code " << mpierr << " returned from MPI_Finalize()\n";
#else
  mpiIsFinalized_ = true;
#endif
}
void GlobalMPISession::barrier()
{
  justInTimeInitialize();
#ifdef HAVE_MPI
  MPI_Barrier(MPI_COMM_WORLD);
#endif
}
int GlobalMPISession::sum(int localVal)
{
  justInTimeInitialize();
#ifdef HAVE_MPI
  int globalSum = -1;
  MPI_Allreduce(&localVal, &globalSum, 1, MPI_INT, MPI_SUM, MPI_COMM_WORLD);
  return globalSum;
#else
  return localVal;
#endif
}
void GlobalMPISession::allGather(int localVal, const ArrayView<int> &allVals)
{
  justInTimeInitialize();
  TEUCHOS_ASSERT_EQUALITY(allVals.size(), getNProc());
#ifdef HAVE_MPI
  MPI_Allgather( &localVal, 1, MPI_INT, allVals.getRawPtr(), 1, MPI_INT,
    MPI_COMM_WORLD );
#else
  allVals[0] = localVal;
#endif
}
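
// Illustrative sketch (an assumption, not code from this file): sum() is a
// convenient cross-process consistency check, e.g. verifying that every
// process succeeded at some local step:
//
//   const int ok = localStepSucceeded ? 1 : 0;   // hypothetical local flag
//   const int numOk = Teuchos::GlobalMPISession::sum(ok);
//   const bool allOk = (numOk == Teuchos::GlobalMPISession::getNProc());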
void GlobalMPISession::initialize( std::ostream *out )
{
#ifdef HAVE_MPI
  if (haveMPIState_)
    return; // We already have the state of MPI_COMM_WORLD.

  // MPI may have been initialized outside of this class, so query it
  // directly rather than relying on the constructor having run.
  int mpiHasBeenStarted = 0;
  MPI_Initialized(&mpiHasBeenStarted);
  if (!mpiHasBeenStarted)
    return; // Leave rank_ and nProc_ at their default serial values.

  int mpierr = 0;
  mpierr = ::MPI_Comm_rank( MPI_COMM_WORLD, &rank_ );
  if (mpierr != 0) {
    *out << "Error code=" << mpierr << " detected in MPI_Comm_rank()"
      << std::endl;
  }

  mpierr = ::MPI_Comm_size( MPI_COMM_WORLD, &nProc_ );
  if (mpierr != 0) {
    *out << "Error code=" << mpierr << " detected in MPI_Comm_size()"
      << std::endl;
  }

  haveMPIState_ = true;
  mpiIsFinalized_ = false;
#endif // HAVE_MPI
}