Domi: Multi-dimensional, distributed data structures
Domi_MDMap.hpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Domi: Multi-dimensional Distributed Linear Algebra Services
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under the terms of Contract DE-AC04-94AL85000 with Sandia
8 // Corporation, the U.S. Government retains certain rights in this
9 // software.
10 //
11 // Redistribution and use in source and binary forms, with or without
12 // modification, are permitted provided that the following conditions are
13 // met:
14 //
15 // 1. Redistributions of source code must retain the above copyright
16 // notice, this list of conditions and the following disclaimer.
17 //
18 // 2. Redistributions in binary form must reproduce the above copyright
19 // notice, this list of conditions and the following disclaimer in the
20 // documentation and/or other materials provided with the distribution.
21 //
22 // 3. Neither the name of the Corporation nor the names of the
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
27 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
29 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
30 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
31 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
32 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
33 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
34 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
35 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
36 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37 //
38 // Questions? Contact William F. Spotz (wfspotz@sandia.gov)
39 //
40 // ***********************************************************************
41 // @HEADER
42 
43 #ifndef DOMI_MDMAP_HPP
44 #define DOMI_MDMAP_HPP
45 
46 // System includes
47 #include <algorithm>
48 #include <limits>
49 #include <sstream>
50 
51 // Domi includes
52 #include "Domi_ConfigDefs.hpp"
53 #include "Domi_DefaultNode.hpp"
54 #include "Domi_Utils.hpp"
55 #include "Domi_getValidParameters.hpp"
56 #include "Domi_Slice.hpp"
57 #include "Domi_MDComm.hpp"
58 #include "Domi_MDArray.hpp"
59 
60 // Teuchos includes
61 #include "Teuchos_Comm.hpp"
62 #include "Teuchos_DefaultComm.hpp"
63 #include "Teuchos_CommHelpers.hpp"
64 #include "Teuchos_Tuple.hpp"
65 
66 // Kokkos includes
67 #include "Kokkos_Core.hpp"
68 
69 #ifdef HAVE_EPETRA
70 #include "Epetra_Map.h"
71 #endif
72 
73 #ifdef HAVE_TPETRA
74 #include "Tpetra_Map.hpp"
75 #include "Tpetra_Util.hpp"
76 #endif
77 
78 namespace Domi
79 {
80 
143 template< class Node = DefaultNode::DefaultNodeType >
144 class MDMap
145 {
146 public:
147 
150 
184  MDMap(const Teuchos::RCP< const MDComm > mdComm,
185  const Teuchos::ArrayView< const dim_type > & dimensions,
186  const Teuchos::ArrayView< const int > & commPad =
187  Teuchos::ArrayView< const int >(),
188  const Teuchos::ArrayView< const int > & bndryPad =
189  Teuchos::ArrayView< const int >(),
190  const Teuchos::ArrayView< const int > & replicatedBoundary =
191  Teuchos::ArrayView< const int >(),
192  const Layout layout = DEFAULT_ORDER);
193 
205  MDMap(Teuchos::ParameterList & plist);
206 
219  MDMap(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
220  Teuchos::ParameterList & plist);
221 
234  MDMap(const Teuchos::RCP< const MDComm > mdComm,
235  Teuchos::ParameterList & plist);
236 
260  MDMap(const Teuchos::RCP< const MDComm > mdComm,
261  const Teuchos::ArrayView< Slice > & myGlobalBounds,
262  const Teuchos::ArrayView< padding_type > & padding =
263  Teuchos::ArrayView< padding_type >(),
264  const Teuchos::ArrayView< const int > & replicatedBoundary =
265  Teuchos::ArrayView< const int >(),
266  const Layout layout = DEFAULT_ORDER);
267 
272  MDMap(const MDMap< Node > & source);
273 
287  MDMap(const MDMap< Node > & parent,
288  int axis,
289  dim_type index);
290 
312  MDMap(const MDMap< Node > & parent,
313  int axis,
314  const Slice & slice,
315  int bndryPad = 0);
316 
330  MDMap(const MDMap< Node > & parent,
331  const Teuchos::ArrayView< Slice > & slices,
332  const Teuchos::ArrayView< int > & bndryPad =
333  Teuchos::ArrayView< int >());
334 
337  ~MDMap();
338 
340 
343 
348  MDMap< Node > & operator=(const MDMap< Node > & source);
349 
351 
354 
360  inline Teuchos::RCP< const MDComm > getMDComm() const;
361 
368  inline bool onSubcommunicator() const;
369 
376  inline Teuchos::RCP< const Teuchos::Comm< int > > getTeuchosComm() const;
377 
384  inline int numDims() const;
385 
392  inline Teuchos::Array< int > getCommDims() const;
393 
403  inline int getCommDim(int axis) const;
404 
414  inline bool isPeriodic(int axis) const;
415 
425  inline int getCommIndex(int axis) const;
426 
443  inline int getLowerNeighbor(int axis) const;
444 
462  inline int getUpperNeighbor(int axis) const;
463 
465 
468 
472  Teuchos::Array< dim_type > getGlobalDims() const;
473 
482  dim_type getGlobalDim(int axis,
483  bool withBndryPad=false) const;
484 
493  Slice getGlobalBounds(int axis,
494  bool withBndryPad=false) const;
495 
509  Slice getGlobalRankBounds(int axis,
510  bool withBndryPad=false) const;
511 
514  Teuchos::Array< dim_type > getLocalDims() const;
515 
524  dim_type getLocalDim(int axis,
525  bool withPad=false) const;
526 
534  Teuchos::ArrayView< const Slice > getLocalBounds() const;
535 
549  Slice getLocalBounds(int axis,
550  bool withPad=false) const;
551 
569  Slice getLocalInteriorBounds(int axis) const;
570 
572 
575 
586  bool hasPadding() const;
587 
596  int getLowerPadSize(int axis) const;
597 
606  int getUpperPadSize(int axis) const;
607 
617  int getCommPadSize(int axis) const;
618 
625  int getLowerBndryPad(int axis) const;
626 
633  int getUpperBndryPad(int axis) const;
634 
641  Teuchos::Array< int > getBndryPadSizes() const;
642 
652  int getBndryPadSize(int axis) const;
653 
654  /* \brief Return whether the given local index is in the padding
655  *
656  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
657  * (i,j,k) for 3D, etc)
658  */
659  bool isPad(const Teuchos::ArrayView< dim_type > & index) const;
660 
661  /* \brief Return whether the given local index is in the communication
662  * padding
663  *
664  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
665  * (i,j,k) for 3D, etc)
666  */
667  bool isCommPad(const Teuchos::ArrayView< dim_type > & index) const;
668 
669  /* \brief Return whether the given local index is in the boundary
670  * padding
671  *
672  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
673  * (i,j,k) for 3D, etc)
674  */
675  bool isBndryPad(const Teuchos::ArrayView< dim_type > & index) const;
676 
679  bool isReplicatedBoundary(int axis) const;
680 
683  Layout getLayout() const;
684 
687  Teuchos::RCP< Node > getNode() const;
688 
690 
693 
698  Teuchos::ArrayView< Teuchos::RCP< const Domi::MDMap< Node > > >
699  getAxisMaps() const;
700 
705  Teuchos::RCP< const Domi::MDMap< Node > > getAxisMap(int axis) const;
706 
723  Teuchos::RCP< const MDMap< Node > >
724  getAugmentedMDMap(const dim_type leadingDim,
725  const dim_type trailingDim=0) const;
726 
727 #ifdef HAVE_EPETRA
728 
737  Teuchos::RCP< const Epetra_Map >
738  getEpetraMap(bool withCommPad=true) const;
739 
750  Teuchos::RCP< const Epetra_Map >
751  getEpetraAxisMap(int axis,
752  bool withCommPad=true) const;
753 
754 #endif
755 
756 #ifdef HAVE_TPETRA
757 
767  template< class LocalOrdinal,
768  class GlobalOrdinal = LocalOrdinal,
769  class Node2 = Node >
770  Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
771  getTpetraMap(bool withCommPad=true) const;
772 
785  template< class LocalOrdinal,
786  class GlobalOrdinal = LocalOrdinal,
787  class Node2 = Node >
788  Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
789  getTpetraAxisMap(int axis,
790  bool withCommPad=true) const;
791 
792 #endif
793 
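  // Usage sketch (the ordinal types and variable names below are
  // illustrative assumptions, and require Epetra/Tpetra support to be
  // enabled): given an existing MDMap "mdMap", equivalent flat maps
  // may be requested as
  //
  //   Teuchos::RCP< const Epetra_Map > eMap =
  //     mdMap.getEpetraMap();                        // with comm padding
  //   auto tMap =
  //     mdMap.getTpetraMap< int, long long >(false); // without comm padding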
795 
798 
803  Teuchos::Array< dim_type >
804  getGlobalIndex(size_type globalID) const;
805 
810  Teuchos::Array< dim_type >
811  getLocalIndex(size_type localID) const;
812 
817  size_type getGlobalID(size_type localID) const;
818 
823  size_type
824  getGlobalID(const Teuchos::ArrayView< dim_type > & globalIndex) const;
825 
833  size_type getLocalID(size_type globalID) const;
834 
839  size_type
840  getLocalID(const Teuchos::ArrayView< dim_type > & localIndex) const;
841 
843 
846 
861  bool isCompatible(const MDMap< Node > & mdMap) const;
862 
884  bool isSameAs(const MDMap< Node > & mdMap,
885  const int verbose = 0) const;
886 
896  bool isContiguous() const;
897 
899 
900 private:
901 
902  // A private method for computing the bounds and local dimensions,
903  // after the global dimensions, communication and boundary padding
904  // have been properly assigned.
905  void computeBounds();
906 
907  // The underlying multi-dimensional communicator.
908  Teuchos::RCP< const MDComm > _mdComm;
909 
910  // The size of the global dimensions along each axis. This includes
911  // the values of the boundary padding.
912  Teuchos::Array< dim_type > _globalDims;
913 
914  // Store the start and stop indexes of the global problem along each
915  // axis. This includes the values of the boundary padding.
916  Teuchos::Array< Slice > _globalBounds;
917 
918  // A double array of Slices that stores the starting and stopping
919  // indexes for each axis processor along each axis. This data
920  // structure allows any processor to know the map bounds on any
921  // other processor. These bounds do NOT include the boundary or
922  // communication padding. The outer array will have a size equal to
923  // the number of dimensions. Each inner array will have a size
924  // equal to the number of axis processors along that axis.
925  Teuchos::Array< Teuchos::Array< Slice > > _globalRankBounds;
926 
927  // The global stride between adjacent IDs. This quantity is
928  // "virtual" as it does not describe actual memory layout, but it is
929  // useful for index conversion algorithms.
930  Teuchos::Array< size_type > _globalStrides;
931 
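  // Illustrative note (an editorial sketch, not a statement from the
  // original header): with these strides, a global index
  // (i0, i1, ..., i_{n-1}) corresponds to a global ID of roughly
  // i0*_globalStrides[0] + i1*_globalStrides[1] + ..., offset from
  // _globalMin on sub-maps; this is the basis of the getGlobalID() /
  // getGlobalIndex() conversions declared above.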
932  // The minimum global ID of the global data structure, including
933  // boundary padding. This will only be non-zero on a sub-map.
934  size_type _globalMin;
935 
936  // The maximum global ID of the global data structure, including
937  // boundary padding.
938  size_type _globalMax;
939 
940  // The size of the local dimensions along each axis. This includes
941  // the values of the padding.
942  Teuchos::Array< dim_type > _localDims;
943 
944  // The local loop bounds along each axis, stored as an array of
945  // Slices. These bounds DO include the padding. The start()
946  // attribute of these Slices will all be zero by definition.
947  // This may seem wasteful (we could store an array of dim_type
948  // rather than an array of Slice), but this storage is convenient
949  // when it comes to the getLocalBounds() method.
950  Teuchos::Array< Slice > _localBounds;
951 
952  // The local stride between adjacent elements in memory.
953  Teuchos::Array< size_type > _localStrides;
954 
955  // The minimum local ID of the local data structure, including
956  // padding.
957  size_type _localMin;
958 
959  // The maximum local ID of the local data structure, including
960  // padding.
961  size_type _localMax;
962 
963  // The communication padding that was specified at construction, one
964  // value along each axis.
965  Teuchos::Array< int > _commPadSizes;
966 
967  // The actual padding stored on this processor along each axis. The
968  // padding can be either communication padding or boundary padding
969  // based on the processor position on the boundary of a domain.
970  Teuchos::Array< padding_type > _pad;
971 
972  // The size of the boundary padding along each axis.
973  Teuchos::Array< int > _bndryPadSizes;
974 
975  // The actual boundary padding stored along each axis. For a full
976  // communicator, the lower and upper boundary padding are both the
977  // same as the corresponding value in _bndryPadSizes. However, the
978  // introduction of sub-maps creates the possibility of different
979  // upper and lower boundary padding values. If an axis is periodic,
980  // then these values will be set to the communication padding.
981  Teuchos::Array< padding_type > _bndryPad;
982 
983  // An array of flags, one for each axis, that specifies whether the
984  // endpoints of a periodic boundary are replicated (1 or true) or
985  // unique (0 or false). These flags only have meaning for axes that
986  // are periodic. The array type is int, because Array< bool > is
987  // specialized and sometimes difficult to work with.
988  Teuchos::Array< int > _replicatedBoundary;
989 
990  // The storage order
991  Layout _layout;
992 
993  // An array of axis maps that correspond to the full MDMap. An axis
994  // map describes the map along a single axis. This member is mutable
995  // because it is logically const but does not get constructed until
996  // requested by the user.
997  mutable Teuchos::Array< Teuchos::RCP< const MDMap< Node > > > _axisMaps;
998 
999  // Kokkos node
1000  Teuchos::RCP< Node > _node;
1001 
1002 #ifdef HAVE_EPETRA
1003  // An RCP pointing to an Epetra_Map that is equivalent to this
1004  // MDMap, including communication padding. It is mutable because we
1005  // do not construct it until it is asked for by a get method that is
1006  // const.
1007  mutable Teuchos::RCP< const Epetra_Map > _epetraMap;
1008 
1009  // An RCP pointing to an Epetra_Map that is equivalent to this
1010  // MDMap, excluding communication padding. The returned Epetra_Map
1011  // thus indicates what IDs are owned by this processor. It is
1012  // mutable because we do not construct it until it is asked for by a
1013  // get method that is const.
1014  mutable Teuchos::RCP< const Epetra_Map > _epetraOwnMap;
1015 
1016  // An array that stores Epetra_Maps that represent the
1017  // distribution of indexes along each axis, including communication
1018  // padding. It is mutable because we do not construct it until it is
1019  // asked for by a get method that is const.
1020  mutable Teuchos::Array< Teuchos::RCP< const Epetra_Map > > _epetraAxisMaps;
1021 
1022  // An array that stores Epetra_Maps that represent the
1023  // distribution of indexes along each axis, excluding communication
1024  // padding. It is mutable because we do not construct it until it is
1025  // asked for by a get method that is const.
1026  mutable Teuchos::Array< Teuchos::RCP< const Epetra_Map > > _epetraAxisOwnMaps;
1027 #endif
1028 
1029 };
1030 
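// Usage sketch (the processor grid, problem sizes and pad widths are
// illustrative assumptions for a 4-process run; the MDComm
// constructor form used here is also an assumption): the most common
// construction path is an MDComm plus global dimensions with
// optional padding.
//
//   Teuchos::RCP< const Teuchos::Comm< int > > comm =
//     Teuchos::DefaultComm< int >::getComm();
//   Teuchos::RCP< const Domi::MDComm > mdComm =
//     Teuchos::rcp(new Domi::MDComm(comm, Teuchos::tuple(2, 2)));
//   Domi::MDMap<> mdMap(mdComm,
//                       Teuchos::tuple< Domi::dim_type >(100, 80),
//                       Teuchos::tuple(1, 1),   // communication padding
//                       Teuchos::tuple(2, 2));  // boundary padding
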
1032 // Implementations
1034 
1035 template< class Node >
1036 MDMap< Node >::
1037 MDMap(const Teuchos::RCP< const MDComm > mdComm,
1038  const Teuchos::ArrayView< const dim_type > & dimensions,
1039  const Teuchos::ArrayView< const int > & commPad,
1040  const Teuchos::ArrayView< const int > & bndryPad,
1041  const Teuchos::ArrayView< const int > & replicatedBoundary,
1042  const Layout layout) :
1043  _mdComm(mdComm),
1044  _globalDims(mdComm->numDims()),
1045  _globalBounds(),
1046  _globalRankBounds(mdComm->numDims()),
1047  _globalStrides(),
1048  _globalMin(0),
1049  _globalMax(),
1050  _localDims(mdComm->numDims(), 0),
1051  _localBounds(),
1052  _localStrides(),
1053  _localMin(0),
1054  _localMax(),
1055  _commPadSizes(mdComm->numDims(), 0),
1056  _pad(),
1057  _bndryPadSizes(mdComm->numDims(), 0),
1058  _bndryPad(),
1059  _replicatedBoundary(createArrayOfInts(mdComm->numDims(),
1060  replicatedBoundary)),
1061  _layout(layout)
1062 {
1063  // Temporarily store the number of dimensions
1064  int numDims = mdComm->numDims();
1065 
1066  // Check the global dimensions
1067  TEUCHOS_TEST_FOR_EXCEPTION(
1068  numDims != dimensions.size(),
1069  InvalidArgument,
1070  "Size of dimensions does not match MDComm number of dimensions");
1071 
1072  // Copy the communication and boundary padding sizes, compute the
1073  // global dimensions and bounds, and the actual padding
1074  for (int axis = 0; axis < numDims; ++axis)
1075  {
1076  if (axis < commPad.size() ) _commPadSizes[ axis] = commPad[ axis];
1077  if (axis < bndryPad.size()) _bndryPadSizes[axis] = bndryPad[axis];
1078  if (_mdComm->isPeriodic(axis))
1079  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1080  _commPadSizes[axis]));
1081  else
1082  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1083  _bndryPadSizes[axis]));
1084  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1085  _bndryPad[axis][1];
1086  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1087  int lower, upper;
1088  if (getLowerNeighbor(axis) == -1)
1089  lower = _bndryPadSizes[axis];
1090  else
1091  lower = _commPadSizes[axis];
1092  if (getUpperNeighbor(axis) == -1)
1093  upper = _bndryPadSizes[axis];
1094  else
1095  upper = _commPadSizes[axis];
1096  _pad.push_back(Teuchos::tuple(lower, upper));
1097  }
1098 
1099  // Compute the global size
1100  _globalMax = computeSize(_globalDims());
1101 
1102  // Compute _globalRankBounds, _localBounds, and _localDims. Then
1103  // compute the local size
1104  computeBounds();
1105  _localMax = computeSize(_localDims());
1106 
1107  // Compute the global and local strides
1108  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1109  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1110 }
1111 
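// Summary of the padding logic in the loop above: along each axis the
// pad actually stored on this process is the boundary pad size on any
// side with no neighbor (getLowerNeighbor() or getUpperNeighbor()
// returns -1) and the communication pad size otherwise, while
// periodic axes force the boundary pad to equal the communication
// pad.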
1113 
1114 template< class Node >
1115 MDMap< Node >::
1116 MDMap(Teuchos::ParameterList & plist) :
1117  _mdComm(Teuchos::rcp(new MDComm(plist))),
1118  _globalDims(),
1119  _globalBounds(),
1120  _globalRankBounds(),
1121  _globalStrides(),
1122  _globalMin(0),
1123  _globalMax(),
1124  _localDims(),
1125  _localBounds(),
1126  _localStrides(),
1127  _localMin(0),
1128  _localMax(0),
1129  _commPadSizes(),
1130  _pad(),
1131  _bndryPadSizes(),
1132  _bndryPad(),
1133  _replicatedBoundary(),
1134  _layout()
1135 {
1136  // Note that the call to the MDComm constructor in the constructor
1137  // initialization list will validate the ParameterList, so we don't
1138  // have to do it again here.
1139 
1140  // Temporarily store the number of dimensions
1141  int numDims = _mdComm->numDims();
1142 
1143  // Check the global dimensions
1144  Teuchos::Array< dim_type > dimensions =
1145  plist.get("dimensions", Teuchos::Array< dim_type >());
1146  TEUCHOS_TEST_FOR_EXCEPTION(
1147  numDims != dimensions.size(),
1148  InvalidArgument,
1149  "Size of dimensions does not match MDComm number of dimensions");
1150 
1151  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1152  // ParameterList
1153  int commPad = plist.get("communication pad size", int(0));
1154  int bndryPad = plist.get("boundary pad size" , int(0));
1155  Teuchos::Array< int > commPads =
1156  plist.get("communication pad sizes", Teuchos::Array< int >());
1157  Teuchos::Array< int > bndryPads =
1158  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1159  _commPadSizes.resize( numDims);
1160  _bndryPadSizes.resize(numDims);
1161  _globalDims.resize( numDims);
1162 
1163  // Copy the communication and boundary padding sizes, compute the
1164  // global dimensions and bounds, and the actual padding
1165  for (int axis = 0; axis < numDims; ++axis)
1166  {
1167  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1168  else _commPadSizes[ axis] = commPad;
1169  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1170  else _bndryPadSizes[axis] = bndryPad;
1171  if (_mdComm->isPeriodic(axis))
1172  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1173  _commPadSizes[axis]));
1174  else
1175  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1176  _bndryPadSizes[axis]));
1177  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1178  _bndryPad[axis][1];
1179  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1180  int lower, upper;
1181  if (getLowerNeighbor(axis) == -1)
1182  lower = _bndryPadSizes[axis];
1183  else
1184  lower = _commPadSizes[axis];
1185  if (getUpperNeighbor(axis) == -1)
1186  upper = _bndryPadSizes[axis];
1187  else
1188  upper = _commPadSizes[axis];
1189  _pad.push_back(Teuchos::tuple(lower, upper));
1190  }
1191 
1192  // Compute the global size
1193  _globalMax = computeSize(_globalDims());
1194 
1195  // Compute _globalRankBounds, _localBounds, and _localDims.
1196  // Then compute the local size
1197  _globalRankBounds.resize(numDims);
1198  _localDims.resize(numDims);
1199  computeBounds();
1200  _localMax = computeSize(_localDims());
1201 
1202  // Set the replicated boundary flags along each axis
1203  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1204  Teuchos::Array< int >());
1205  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1206 
1207  // Set the layout
1208  std::string layout = plist.get("layout", "DEFAULT");
1209  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1210  if (layout == "C ORDER")
1211  _layout = C_ORDER;
1212  else if (layout == "FORTRAN ORDER")
1213  _layout = FORTRAN_ORDER;
1214  else if (layout == "ROW MAJOR")
1215  _layout = ROW_MAJOR;
1216  else if (layout == "COLUMN MAJOR")
1217  _layout = COLUMN_MAJOR;
1218  else if (layout == "LAST INDEX FASTEST")
1219  _layout = LAST_INDEX_FASTEST;
1220  else if (layout == "FIRST INDEX FASTEST")
1221  _layout = FIRST_INDEX_FASTEST;
1222  else
1223  _layout = DEFAULT_ORDER;
1224 
1225  // Compute the strides
1226  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1227  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1228 }
1229 
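// Sketch of a ParameterList accepted by this constructor (only keys
// read by the plist.get() calls above are shown; the values, and any
// MDComm-related entries omitted here, are illustrative assumptions):
//
//   Teuchos::ParameterList plist;
//   Teuchos::Array< Domi::dim_type > dims(2);
//   dims[0] = 100;  dims[1] = 80;
//   plist.set("dimensions"            , dims);
//   plist.set("communication pad size", 1);
//   plist.set("boundary pad size"     , 2);
//   plist.set("layout"                , "C ORDER");
//   Domi::MDMap<> mdMap(plist);
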
1231 
1232 template< class Node >
1233 MDMap< Node >::
1234 MDMap(Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
1235  Teuchos::ParameterList & plist) :
1236  _mdComm(Teuchos::rcp(new MDComm(teuchosComm, plist))),
1237  _globalDims(),
1238  _globalBounds(),
1239  _globalRankBounds(),
1240  _globalStrides(),
1241  _globalMin(0),
1242  _globalMax(),
1243  _localDims(),
1244  _localBounds(),
1245  _localStrides(),
1246  _localMin(0),
1247  _localMax(0),
1248  _commPadSizes(),
1249  _pad(),
1250  _bndryPadSizes(),
1251  _bndryPad(),
1252  _replicatedBoundary(),
1253  _layout()
1254 {
1255  // Note that the call to the MDComm constructor in the constructor
1256  // initialization list will validate the ParameterList, so we don't
1257  // have to do it again here.
1258 
1259  // Temporarily store the number of dimensions
1260  int numDims = _mdComm->numDims();
1261 
1262  // Check the global dimensions
1263  Teuchos::Array< dim_type > dimensions =
1264  plist.get("dimensions", Teuchos::Array< dim_type >());
1265  TEUCHOS_TEST_FOR_EXCEPTION(
1266  numDims != dimensions.size(),
1267  InvalidArgument,
1268  "Size of dimensions does not match MDComm number of dimensions");
1269 
1270  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1271  // ParameterList
1272  int commPad = plist.get("communication pad size", int(0));
1273  int bndryPad = plist.get("boundary pad size" , int(0));
1274  Teuchos::Array< int > commPads =
1275  plist.get("communication pad sizes", Teuchos::Array< int >());
1276  Teuchos::Array< int > bndryPads =
1277  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1278  _commPadSizes.resize( numDims);
1279  _bndryPadSizes.resize(numDims);
1280  _globalDims.resize( numDims);
1281 
1282  // Copy the communication and boundary padding sizes, compute the
1283  // global dimensions and bounds, and the actual padding
1284  for (int axis = 0; axis < numDims; ++axis)
1285  {
1286  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1287  else _commPadSizes[ axis] = commPad;
1288  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1289  else _bndryPadSizes[axis] = bndryPad;
1290  if (_mdComm->isPeriodic(axis))
1291  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1292  _commPadSizes[axis]));
1293  else
1294  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1295  _bndryPadSizes[axis]));
1296  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1297  _bndryPad[axis][1];
1298  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1299  int lower, upper;
1300  if (getLowerNeighbor(axis) == -1)
1301  lower = _bndryPadSizes[axis];
1302  else
1303  lower = _commPadSizes[axis];
1304  if (getUpperNeighbor(axis) == -1)
1305  upper = _bndryPadSizes[axis];
1306  else
1307  upper = _commPadSizes[axis];
1308  _pad.push_back(Teuchos::tuple(lower, upper));
1309  }
1310  // std::cout << "MDMap constructor: _commPadSizes = " << _commPadSizes
1311  // << ", bndryPadSizes = " << _bndryPadSizes << ", _bndryPad = "
1312  // << _bndryPad << ", _pad = " << _pad << ", _globalDims = "
1313  // << _globalDims << ", _globalBounds = " << _globalBounds
1314  // << std::endl;
1315 
1316  // Compute the global size
1317  _globalMax = computeSize(_globalDims());
1318 
1319  // Compute _globalRankBounds, _localBounds, and _localDims.
1320  // Then compute the local size
1321  _globalRankBounds.resize(numDims);
1322  _localDims.resize(numDims);
1323  computeBounds();
1324  _localMax = computeSize(_localDims());
1325 
1326  // Set the replicated boundary flags along each axis
1327  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1328  Teuchos::Array< int >());
1329  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1330 
1331  // Set the layout
1332  std::string layout = plist.get("layout", "DEFAULT");
1333  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1334  if (layout == "C ORDER")
1335  _layout = C_ORDER;
1336  else if (layout == "FORTRAN ORDER")
1337  _layout = FORTRAN_ORDER;
1338  else if (layout == "ROW MAJOR")
1339  _layout = ROW_MAJOR;
1340  else if (layout == "COLUMN MAJOR")
1341  _layout = COLUMN_MAJOR;
1342  else if (layout == "LAST INDEX FASTEST")
1343  _layout = LAST_INDEX_FASTEST;
1344  else if (layout == "FIRST INDEX FASTEST")
1345  _layout = FIRST_INDEX_FASTEST;
1346  else
1347  _layout = DEFAULT_ORDER;
1348 
1349  // Compute the strides
1350  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1351  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1352 }
1353 
1355 
1356 template< class Node >
1357 MDMap< Node >::
1358 MDMap(Teuchos::RCP< const MDComm > mdComm,
1359  Teuchos::ParameterList & plist) :
1360  _mdComm(mdComm),
1361  _globalDims(mdComm->numDims()),
1362  _globalBounds(),
1363  _globalRankBounds(mdComm->numDims()),
1364  _globalStrides(),
1365  _globalMin(0),
1366  _globalMax(),
1367  _localDims(mdComm->numDims(), 0),
1368  _localBounds(),
1369  _localStrides(),
1370  _localMin(0),
1371  _localMax(),
1372  _commPadSizes(mdComm->numDims(), 0),
1373  _pad(),
1374  _bndryPadSizes(mdComm->numDims(), 0),
1375  _bndryPad(),
1376  _replicatedBoundary(),
1377  _layout()
1378 {
1379  // Validate the ParameterList
1380  plist.validateParameters(*getValidParameters());
1381 
1382  // Temporarily store the number of dimensions
1383  int numDims = _mdComm->numDims();
1384 
1385  // Check the global dimensions
1386  Teuchos::Array< dim_type > dimensions =
1387  plist.get("dimensions", Teuchos::Array< dim_type >());
1388  TEUCHOS_TEST_FOR_EXCEPTION(
1389  numDims != dimensions.size(),
1390  InvalidArgument,
1391  "Number of dimensions does not match MDComm number of dimensions");
1392 
1393  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1394  // ParameterList
1395  int commPad = plist.get("communication pad size", int(0));
1396  int bndryPad = plist.get("boundary pad size" , int(0));
1397  Teuchos::Array< int > commPads =
1398  plist.get("communication pad sizes", Teuchos::Array< int >());
1399  Teuchos::Array< int > bndryPads =
1400  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1401  _commPadSizes.resize( numDims);
1402  _bndryPadSizes.resize(numDims);
1403  _globalDims.resize( numDims);
1404 
1405  // Copy the communication and boundary padding sizes, compute the
1406  // global dimensions and bounds, and the actual padding
1407  for (int axis = 0; axis < numDims; ++axis)
1408  {
1409  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1410  else _commPadSizes[ axis] = commPad;
1411  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1412  else _bndryPadSizes[axis] = bndryPad;
1413  if (_mdComm->isPeriodic(axis))
1414  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1415  _commPadSizes[axis]));
1416  else
1417  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1418  _bndryPadSizes[axis]));
1419  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1420  _bndryPad[axis][1];
1421  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1422  int lower, upper;
1423  if (getLowerNeighbor(axis) == -1)
1424  lower = _bndryPadSizes[axis];
1425  else
1426  lower = _commPadSizes[axis];
1427  if (getUpperNeighbor(axis) == -1)
1428  upper = _bndryPadSizes[axis];
1429  else
1430  upper = _commPadSizes[axis];
1431  _pad.push_back(Teuchos::tuple(lower, upper));
1432  }
1433 
1434  // Compute the global size
1435  _globalMax = computeSize(_globalDims());
1436 
1437  // Compute _globalRankBounds, _localBounds, and _localDims.
1438  // Then compute the local size
1439  _globalRankBounds.resize(numDims);
1440  _localDims.resize(numDims);
1441  computeBounds();
1442  _localMax = computeSize(_localDims());
1443 
1444  // Set the replicated boundary flags along each axis
1445  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1446  Teuchos::Array< int >());
1447  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1448 
1449  // Set the layout
1450  std::string layout = plist.get("layout", "DEFAULT");
1451  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1452  if (layout == "C ORDER")
1453  _layout = C_ORDER;
1454  else if (layout == "FORTRAN ORDER")
1455  _layout = FORTRAN_ORDER;
1456  else if (layout == "ROW MAJOR")
1457  _layout = ROW_MAJOR;
1458  else if (layout == "COLUMN MAJOR")
1459  _layout = COLUMN_MAJOR;
1460  else if (layout == "LAST INDEX FASTEST")
1461  _layout = LAST_INDEX_FASTEST;
1462  else if (layout == "FIRST INDEX FASTEST")
1463  _layout = FIRST_INDEX_FASTEST;
1464  else
1465  _layout = DEFAULT_ORDER;
1466 
1467  // Compute the strides
1468  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1469  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1470 }
1471 
1473 
1474 template< class Node >
1475 MDMap< Node >::
1476 MDMap(const Teuchos::RCP< const MDComm > mdComm,
1477  const Teuchos::ArrayView< Slice > & myGlobalBounds,
1478  const Teuchos::ArrayView< padding_type > & padding,
1479  const Teuchos::ArrayView< const int > & replicatedBoundary,
1480  const Layout layout) :
1481  _mdComm(mdComm),
1482  _globalDims(mdComm->numDims()),
1483  _globalBounds(mdComm->numDims()),
1484  _globalRankBounds(mdComm->numDims()),
1485  _globalStrides(mdComm->numDims()),
1486  _globalMin(0),
1487  _globalMax(0),
1488  _localDims(mdComm->numDims(), 0),
1489  _localBounds(mdComm->numDims()),
1490  _localStrides(mdComm->numDims()),
1491  _localMin(0),
1492  _localMax(0),
1493  _commPadSizes(mdComm->numDims(), 0),
1494  _pad(mdComm->numDims(), Teuchos::tuple(0,0)),
1495  _bndryPadSizes(mdComm->numDims(), 0),
1496  _bndryPad(mdComm->numDims()),
1497  _replicatedBoundary(createArrayOfInts(mdComm->numDims(),
1498  replicatedBoundary)),
1499  _layout(layout)
1500 {
1501  // Check that myGlobalBounds is the correct size
1502  int numDims = _mdComm->numDims();
1503  TEUCHOS_TEST_FOR_EXCEPTION(
1504  myGlobalBounds.size() < numDims,
1505  InvalidArgument,
1506  "MDMap: myGlobalBounds too small");
1507 
1508  // Copy the padding to _pad
1509  int maxAxis = std::min(numDims, (int)padding.size());
1510  for (int axis = 0; axis < maxAxis; ++axis)
1511  _pad[axis] = padding[axis];
1512 
1513  // All of the required info for the MDMap is contained in the
1514  // myGlobalBounds and padding arguments, but it is distributed. We
1515  // will do a gather so that each processor has the global bounds
1516  // data from each other processor.
1517 
1518  // Resize _globalRankBounds
1519  for (int axis = 0; axis < numDims; ++axis)
1520  _globalRankBounds[axis].resize(_mdComm->getCommDim(axis));
1521 
1522  // Allocate and initialize the communication buffers
1523  int numProc = _mdComm->getTeuchosComm()->getSize();
1524  MDArray< dim_type > sendBuffer(Teuchos::tuple(5,numDims),
1525  FIRST_INDEX_FASTEST);
1526  MDArray< dim_type > recvBuffer(Teuchos::tuple(5,numDims,numProc),
1527  FIRST_INDEX_FASTEST);
1528  for (int axis = 0; axis < numDims; ++axis)
1529  {
1530  sendBuffer(0,axis) = mdComm->getCommIndex(axis);
1531  sendBuffer(1,axis) = myGlobalBounds[axis].start();
1532  sendBuffer(2,axis) = myGlobalBounds[axis].stop();
1533  sendBuffer(3,axis) = _pad[axis][0];
1534  sendBuffer(4,axis) = _pad[axis][1];
1535  }
1536 
1537  // Perform the gather all
1538  Teuchos::gatherAll(*(_mdComm->getTeuchosComm()),
1539  (int)sendBuffer.size(),
1540  sendBuffer.getRawPtr(),
1541  (int)recvBuffer.size(),
1542  recvBuffer.getRawPtr());
1543 
1544  // Extract _globalRankBounds and _bndryPad. Because of the
1545  // structure, there will be duplicate Slices and padding, and we
1546  // will check to make sure they are the expected equivalent values.
1547  for (int axis = 0; axis < numDims; ++axis)
1548  {
1549  for (int commIndex = 0; commIndex < _mdComm->getCommDim(axis); ++commIndex)
1550  {
1551  Slice bounds;
1552  padding_type pad;
1553  for (int rank = 0; rank < numProc; ++rank)
1554  {
1555  if (recvBuffer(0,axis,rank) == commIndex)
1556  {
1557  dim_type start = recvBuffer(1,axis,rank);
1558  dim_type stop = recvBuffer(2,axis,rank);
1559  int loPad = recvBuffer(3,axis,rank);
1560  int hiPad = recvBuffer(4,axis,rank);
1561  if (bounds.stop() == Slice::Default)
1562  {
1563  bounds = Slice(start, stop);
1564  pad[0] = loPad;
1565  pad[1] = hiPad;
1566  }
1567  else
1568  {
1569  TEUCHOS_TEST_FOR_EXCEPTION(
1570  (bounds.start() != start) || (bounds.stop() != stop),
1571  BoundsError,
1572  "Global rank bounds mismatch: bounds = " << bounds <<
1573  ", (start,stop) = (" << start << "," << stop << ")");
1574  TEUCHOS_TEST_FOR_EXCEPTION(
1575  (pad[0] != loPad) || (pad[1] != hiPad),
1576  BoundsError,
1577  "Padding value mismatch: pad = " << pad << ", (loPad,hiPad) = ("
1578  << loPad << "," << hiPad << ")");
1579  }
1580  }
1581  }
1582 
1583  // Extract the _bndryPad data
1584  if (commIndex == 0 ) _bndryPad[axis][0] = pad[0];
1585  if (commIndex == mdComm->getCommDim(axis)-1) _bndryPad[axis][1] = pad[1];
1586 
1587  // Extract the verified _globalRankBounds
1588  _globalRankBounds[axis][commIndex] = bounds;
1589  }
1590  }
1591 
1592  // Check the sanity of _globalRankBounds
1593  for (int axis = 0; axis < numDims; ++axis)
1594  {
1595  for (int commIndex = 1; commIndex < _mdComm->getCommDim(axis); ++commIndex)
1596  {
1597  TEUCHOS_TEST_FOR_EXCEPTION(
1598  _globalRankBounds[axis][commIndex-1].stop() !=
1599  _globalRankBounds[axis][commIndex ].start(),
1601  "Global rank bounds are not contiguous");
1602  }
1603  }
1604 
1605  // Set the global data
1606  for (int axis = 0; axis < numDims; ++axis)
1607  {
1608  int commSize = _mdComm->getCommDim(axis);
1609  dim_type start =
1610  _globalRankBounds[axis][0 ].start() - _pad[axis][0];
1611  dim_type stop =
1612  _globalRankBounds[axis][commSize-1].stop() + _pad[axis][1];
1613  _globalDims[axis] = stop - start;
1614  _globalBounds[axis] = Slice(start, stop);
1615  }
1616  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1617 
1618  // Set the global min and max
1619  for (int axis = 0; axis < numDims; ++axis)
1620  {
1621  _globalMin += _globalBounds[axis].start() * _globalStrides[axis];
1622  _globalMax += _globalBounds[axis].stop() * _globalStrides[axis];
1623  }
1624 
1625  // Set the local data
1626  for (int axis = 0; axis < numDims; ++axis)
1627  {
1628  int commIndex = _mdComm->getCommIndex(axis);
1629  dim_type start =
1630  _globalRankBounds[axis][commIndex].start() - _pad[axis][0];
1631  dim_type stop =
1632  _globalRankBounds[axis][commIndex].stop() + _pad[axis][1];
1633  _localDims[axis] = stop - start;
1634  _localBounds[axis] = Slice(stop - start);
1635  }
1636  _localStrides = computeStrides< size_type, dim_type >(_localDims, _layout);
1637 
1638  // Compute the local max
1639  for (int axis = 0; axis < numDims; ++axis)
1640  _localMax += (_localDims[axis] - 1) * _localStrides[axis];
1641 }
1642 
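// Summary of the exchange performed in the constructor above: each
// process fills a 5 x numDims send buffer with (commIndex, start,
// stop, loPad, hiPad) for every axis, a gatherAll distributes these
// buffers to all processes, and _globalRankBounds and _bndryPad are
// then reconstructed from the received data, with duplicate entries
// cross-checked for consistency.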
1644 
1645 template< class Node >
1646 MDMap< Node >::
1647 MDMap(const MDMap< Node > & source) :
1648  _mdComm(source._mdComm),
1649  _globalDims(source._globalDims),
1650  _globalBounds(source._globalBounds),
1651  _globalRankBounds(source._globalRankBounds),
1652  _globalStrides(source._globalStrides),
1653  _globalMin(source._globalMin),
1654  _globalMax(source._globalMax),
1655  _localDims(source._localDims),
1656  _localBounds(source._localBounds),
1657  _localStrides(source._localStrides),
1658  _localMin(source._localMin),
1659  _localMax(source._localMax),
1660  _commPadSizes(source._commPadSizes),
1661  _pad(source._pad),
1662  _bndryPadSizes(source._bndryPadSizes),
1663  _bndryPad(source._bndryPad),
1664  _replicatedBoundary(source._replicatedBoundary),
1665  _layout(source._layout)
1666 {
1667 }
1668 
1670 
1671 template< class Node >
1672 MDMap< Node >::
1673 MDMap(const MDMap< Node > & parent,
1674  int axis,
1675  dim_type index) :
1676  _mdComm(parent._mdComm),
1677  _globalDims(),
1678  _globalBounds(),
1679  _globalRankBounds(),
1680  _globalStrides(),
1681  _globalMin(),
1682  _globalMax(),
1683  _localDims(),
1684  _localBounds(),
1685  _localStrides(),
1686  _localMin(),
1687  _localMax(),
1688  _commPadSizes(),
1689  _pad(),
1690  _bndryPadSizes(),
1691  _bndryPad(),
1692  _replicatedBoundary(),
1693  _layout(parent._layout)
1694 {
1695  if (parent.onSubcommunicator())
1696  {
1697  int numDims = parent.numDims();
1698  TEUCHOS_TEST_FOR_EXCEPTION(
1699  ((axis < 0) || (axis >= numDims)),
1700  RangeError,
1701  "axis = " << axis << " is invalid for communicator with " <<
1702  numDims << " dimensions");
1703 
1704  Slice globalBounds = parent.getGlobalBounds(axis,true);
1705  TEUCHOS_TEST_FOR_EXCEPTION(
1706  ((index < globalBounds.start()) || (index >= globalBounds.stop())),
1707  RangeError,
1708  "index = " << index << " is invalid for MDMap axis " <<
1709  axis << " with bounds " << globalBounds);
1710 
1711  // Determine the axis rank for the processor on which the index
1712  // lives, and construct the MDComm
1713  int thisAxisRank = -1;
1714  for (int axisRank = 0; axisRank < parent.getCommDim(axis); ++axisRank)
1715  if (index >= parent._globalRankBounds[axis][axisRank].start() &&
1716  index < parent._globalRankBounds[axis][axisRank].stop())
1717  thisAxisRank = axisRank;
1718  TEUCHOS_TEST_FOR_EXCEPTION(
1719  (thisAxisRank == -1),
1721  "error computing axis rank for sub-communicator");
1722  _mdComm = Teuchos::rcp(new MDComm(*(parent._mdComm), axis, thisAxisRank));
1723  }
1724 
1725  // There are now two ways for this processor to be off the
1726  // sub-communicator: (1) it came in that way, or (2) it is not on
1727  // the new sub-communicator. Either way, this will be reflected in
1728  // the new _mdComm, so we check it now.
1729  if (_mdComm->onSubcommunicator())
1730  {
1731  int numDims = parent.numDims();
1732  if (numDims == 1)
1733  {
1734  _globalDims.push_back(1);
1735  _globalBounds.push_back(ConcreteSlice(index,index+1));
1736  Teuchos::Array< Slice > bounds(1);
1737  bounds[0] = ConcreteSlice(index, index+1);
1738  _globalRankBounds.push_back(bounds);
1739  _globalStrides.push_back(1);
1740  _globalMin = index * parent._globalStrides[axis];
1741  _globalMax = _globalMin;
1742  _localDims.push_back(1);
1743  _localBounds.push_back(ConcreteSlice(0,1));
1744  _localStrides.push_back(1);
1745  _localMin = parent._localMin +
1746  (index - parent._globalRankBounds[axis][0].start()) *
1747  parent._localStrides[axis];
1748  _localMax = _localMin + 1;
1749  _commPadSizes.push_back(0);
1750  _pad.push_back(Teuchos::tuple(0,0));
1751  _bndryPadSizes.push_back(0);
1752  _bndryPad.push_back(Teuchos::tuple(0,0));
1753  _replicatedBoundary.push_back(0);
1754  }
1755  else
1756  {
1757  _globalMin = parent._globalMin;
1758  _globalMax = parent._globalMax;
1759  _localMin = parent._localMin;
1760  _localMax = parent._localMax;
1761  for (int myAxis = 0; myAxis < numDims; ++myAxis)
1762  {
1763  if (myAxis != axis)
1764  {
1765  _globalDims.push_back(parent._globalDims[myAxis]);
1766  _globalBounds.push_back(parent._globalBounds[myAxis]);
1767  _globalRankBounds.push_back(parent._globalRankBounds[myAxis]);
1768  _globalStrides.push_back(parent._globalStrides[myAxis]);
1769  _localDims.push_back(parent._localDims[myAxis]);
1770  _localBounds.push_back(parent._localBounds[myAxis]);
1771  _localStrides.push_back(parent._localStrides[myAxis]);
1772  _commPadSizes.push_back(parent._commPadSizes[myAxis]);
1773  _pad.push_back(parent._pad[myAxis]);
1774  _bndryPadSizes.push_back(parent._bndryPadSizes[myAxis]);
1775  _bndryPad.push_back(parent._bndryPad[myAxis]);
1776  _replicatedBoundary.push_back(parent._replicatedBoundary[myAxis]);
1777  }
1778  else
1779  {
1780  int axisRank = parent.getCommIndex(axis);
1781  _globalMin += index * parent._globalStrides[axis];
1782  _globalMax -= (parent._globalBounds[axis].stop() - index) *
1783  parent._globalStrides[axis];
1784  _localMin += (index-parent._globalRankBounds[axis][axisRank].start())
1785  * parent._localStrides[axis];
1786  _localMax -= (parent._globalRankBounds[axis][axisRank].stop()-index-1)
1787  * parent._localStrides[axis];
1788  }
1789  }
1790  }
1791  }
1792 }
1793 
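// Usage sketch for the constructor above (the axis and index values
// are illustrative assumptions): extract from a 2D map the sub-map at
// global index 40 along axis 1.
//
//   Domi::MDMap<> subMap(mdMap, 1, 40);
//   // Only processes whose axis-1 rank bounds contain index 40
//   // remain on the sub-communicator; check
//   // subMap.onSubcommunicator() before using the result.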
1795 
1796 template< class Node >
1797 MDMap< Node >::
1798 MDMap(const MDMap< Node > & parent,
1799  int axis,
1800  const Slice & slice,
1801  int bndryPad) :
1802  _mdComm(parent._mdComm),
1803  _globalDims(parent._globalDims),
1804  _globalBounds(parent._globalBounds),
1805  _globalRankBounds(parent._globalRankBounds),
1806  _globalStrides(parent._globalStrides),
1807  _globalMin(parent._globalMin),
1808  _globalMax(parent._globalMax),
1809  _localDims(parent._localDims),
1810  _localBounds(parent._localBounds),
1811  _localStrides(parent._localStrides),
1812  _localMin(parent._localMin),
1813  _localMax(parent._localMax),
1814  _commPadSizes(parent._commPadSizes),
1815  _pad(parent._pad),
1816  _bndryPadSizes(parent._bndryPadSizes),
1817  _bndryPad(parent._bndryPad),
1818  _replicatedBoundary(parent._replicatedBoundary),
1819  _layout(parent._layout)
1820 {
1821  if (parent.onSubcommunicator())
1822  {
1823  // Temporarily store the number of dimensions
1824  int numDims = parent.numDims();
1825 
1826  // Sanity check
1827  TEUCHOS_TEST_FOR_EXCEPTION(
1828  ((axis < 0) || (axis >= numDims)),
1829  RangeError,
1830  "axis = " << axis << " is invalid for MDMap with " <<
1831  numDims << " dimensions");
1832 
1833  // Convert the slice to concrete and check
1834  Slice bounds =
1835  slice.bounds(parent.getGlobalBounds(axis,true).stop());
1836  TEUCHOS_TEST_FOR_EXCEPTION(
1837  ((bounds.start() < parent.getGlobalBounds(axis).start()) ||
1838  (bounds.stop() > parent.getGlobalBounds(axis).stop())),
1839  RangeError,
1840  "Slice along axis " << axis << " is " << bounds << " but must be within "
1841  << parent.getGlobalBounds(axis));
1842  TEUCHOS_TEST_FOR_EXCEPTION(
1843  (bounds.stop() == bounds.start()),
1844  RangeError,
1845  "Slice along axis " << axis << " has length zero");
1846 
1847  // Copy the boundary padding sizes and set initial values for
1848  // _bndryPad
1849  _bndryPadSizes[axis] = bndryPad;
1850  _bndryPad[axis][0] = bndryPad;
1851  _bndryPad[axis][1] = bndryPad;
1852 
1853  // Adjust _globalRankBounds
1854  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1855  ++axisRank)
1856  {
1857  dim_type start = _globalRankBounds[axis][axisRank].start();
1858  dim_type stop = _globalRankBounds[axis][axisRank].stop();
1859  if (start < bounds.start()) start = bounds.start();
1860  if (stop > bounds.stop() ) stop = bounds.stop();
1861  _globalRankBounds[axis][axisRank] = ConcreteSlice(start, stop);
1862  }
1863 
1864  // Alter _bndryPad if necessary
1865  dim_type start = bounds.start() - _bndryPadSizes[axis];
1866  if (start < 0)
1867  {
1868  _bndryPad[axis][0] = bounds.start();
1869  start = 0;
1870  }
1871  dim_type stop = bounds.stop() + _bndryPadSizes[axis];
1872  if (stop > parent.getGlobalBounds(axis,true).stop())
1873  {
1874  _bndryPad[axis][1] = parent.getGlobalBounds(axis,true).stop() -
1875  bounds.stop();
1876  stop = parent.getGlobalBounds(axis,true).stop();
1877  }
1878 
1879  // Compute _globalBounds, _globalDims, _globalMax, and _globalMin
1880  _globalBounds[axis] = ConcreteSlice(start,stop);
1881  _globalDims[axis] = stop - start;
1882  _globalMin += start * _globalStrides[axis];
1883  _globalMax -= (parent.getGlobalDim(axis,true) - stop) *
1884  _globalStrides[axis];
1885 
1886  // Alter _replicatedBoundary
1887  if ((parent.getGlobalBounds(axis,true).start() < _globalBounds[axis].start())
1888  || (parent.getGlobalBounds(axis,true).stop() > _globalBounds[axis].stop()))
1889  _replicatedBoundary[axis] = 0;
1890 
1891  // Build the slice for the MDComm sub-communicator constructor
1892  int pStart = -1;
1893  int pStop = -1;
1894  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1895  ++axisRank)
1896  {
1897  if ((_globalRankBounds[axis][axisRank].start() - _bndryPad[axis][0]
1898  <= _globalBounds[axis].start()) &&
1899  (_globalBounds[axis].start() <
1900  _globalRankBounds[axis][axisRank].stop() + _bndryPad[axis][1]))
1901  if (pStart == -1) pStart = axisRank;
1902  if ((_globalRankBounds[axis][axisRank].start() - _bndryPad[axis][0]
1903  < _globalBounds[axis].stop()) &&
1904  (_globalBounds[axis].stop() <=
1905  _globalRankBounds[axis][axisRank].stop() + _bndryPad[axis][1]))
1906  pStop = axisRank+1;
1907  }
1908  TEUCHOS_TEST_FOR_EXCEPTION(
1909  (pStart == -1 || pStop == -1),
1911  "error computing axis rank slice");
1912  Slice axisRankSlice = ConcreteSlice(pStart,pStop);
1913 
1914  // Construct the MDComm sub-communicator
1915  _mdComm = Teuchos::rcp(new MDComm(*(parent._mdComm), axis, axisRankSlice));
1916 
1917  // We now have a sub-communicator, and should only construct this
1918  // MDMap if this processor is on it. If this processor is off the
1919  // communicator, then we clear many of the data members.
1920  if (_mdComm->onSubcommunicator())
1921  {
1922  // Fix _pad, if needed
1923  int parentAxisRank = parent.getCommIndex(axis);
1924  int myAxisRank = _mdComm->getCommIndex(axis);
1925  if (myAxisRank == 0)
1926  _pad[axis][0] = _bndryPad[axis][0];
1927  if (myAxisRank == _mdComm->getCommDim(axis)-1)
1928  _pad[axis][1] = _bndryPad[axis][1];
1929 
1930  // Compute the local start and stop indexes. Note that
1931  // _globalRankBounds has an axis-rank dimension, and that it
1932  // still uses the parent's commDim, not the new ones. We will
1933  // fix this later.
1934  dim_type start = (_globalRankBounds[axis][parentAxisRank].start() -
1935  _pad[axis][0]) -
1936  (parent._globalRankBounds[axis][parentAxisRank].start() -
1937  parent._pad[axis][0]);
1938  dim_type stop = (_globalRankBounds[axis][parentAxisRank].stop() +
1939  _pad[axis][1]) -
1940  (parent._globalRankBounds[axis][parentAxisRank].start() -
1941  parent._pad[axis][0]);
1942 
1943  // Compute the local bounds, dims, min, and max
1944  _localBounds[axis] = ConcreteSlice(stop - start);
1945  _localDims[axis] = stop - start;
1946  _localMin += start * _localStrides[axis];
1947  _localMax -= (parent.getLocalBounds(axis,true).stop() - stop) *
1948  _localStrides[axis];
1949 
1950  // The new sub-communicator may have fewer processors than the
1951  // parent communicator, so we need to fix _globalRankBounds
1952  Teuchos::Array< Slice > newRankBounds;
1953  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1954  ++axisRank)
1955  if ((axisRank >= axisRankSlice.start()) &&
1956  (axisRank < axisRankSlice.stop() ) )
1957  newRankBounds.push_back(_globalRankBounds[axis][axisRank]);
1958  _globalRankBounds[axis] = newRankBounds;
1959  }
1960  else
1961  {
1962  _localDims.clear();
1963  _localMin = 0;
1964  _localMax = 0;
1965  _localBounds.clear();
1966  _commPadSizes.clear();
1967  _pad.clear();
1968  _bndryPadSizes.clear();
1969  _bndryPad.clear();
1970  _localStrides.clear();
1971  }
1972  }
1973 }
1974 
1976 
1977 template< class Node >
1978 MDMap< Node >::
1979 MDMap(const MDMap< Node > & parent,
1980  const Teuchos::ArrayView< Slice > & slices,
1981  const Teuchos::ArrayView< int > & bndryPad)
1982 {
1983  // Temporarily store the number of dimensions
1984  int numDims = parent.numDims();
1985 
1986  // Sanity check on dimensions
1987  TEUCHOS_TEST_FOR_EXCEPTION(
1988  (slices.size() != numDims),
1989  InvalidArgument,
1990  "number of slices = " << slices.size() << " != parent MDMap number of "
1991  "dimensions = " << numDims);
1992 
1993  // Apply the single-Slice constructor to each axis in succession
1994  MDMap< Node > tempMDMap1(parent);
1995  for (int axis = 0; axis < numDims; ++axis)
1996  {
1997  int bndryPadding = (axis < bndryPad.size()) ? bndryPad[axis] : 0;
1998  MDMap< Node > tempMDMap2(tempMDMap1,
1999  axis,
2000  slices[axis],
2001  bndryPadding);
2002  tempMDMap1 = tempMDMap2;
2003  }
2004  *this = tempMDMap1;
2005 }
2006 
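// Usage sketch for the constructor above (the bounds and padding
// values are illustrative assumptions): restrict a 2D map to the
// global region [10,90) x [5,75) with one layer of boundary padding
// along each axis.
//
//   Teuchos::Tuple< Domi::Slice, 2 > slices =
//     Teuchos::tuple(Domi::Slice(10, 90), Domi::Slice(5, 75));
//   Domi::MDMap<> subMap(mdMap, slices, Teuchos::tuple(1, 1));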
2008 
2009 template< class Node >
2010 MDMap< Node >::~MDMap()
2011 {
2012 }
2013 
2015 
2016 template< class Node >
2017 MDMap< Node > &
2018 MDMap< Node >::operator=(const MDMap< Node > & source)
2019 {
2020  _mdComm = source._mdComm;
2021  _globalDims = source._globalDims;
2022  _globalBounds = source._globalBounds;
2023  _globalRankBounds = source._globalRankBounds;
2024  _globalStrides = source._globalStrides;
2025  _globalMin = source._globalMin;
2026  _globalMax = source._globalMax;
2027  _localDims = source._localDims;
2028  _localBounds = source._localBounds;
2029  _localStrides = source._localStrides;
2030  _localMin = source._localMin;
2031  _localMax = source._localMax;
2032  _commPadSizes = source._commPadSizes;
2033  _pad = source._pad;
2034  _bndryPadSizes = source._bndryPadSizes;
2035  _bndryPad = source._bndryPad;
2036  _layout = source._layout;
2037  return *this;
2038 }
2039 
2041 
2042 template< class Node >
2043 Teuchos::RCP< const MDComm >
2044 MDMap< Node >::getMDComm() const
2045 {
2046  return _mdComm;
2047 }
2048 
2050 
2051 template< class Node >
2052 bool
2053 MDMap< Node >::onSubcommunicator() const
2054 {
2055  return _mdComm->onSubcommunicator();
2056 }
2057 
2059 
2060 template< class Node >
2061 Teuchos::RCP< const Teuchos::Comm< int > >
2062 MDMap< Node >::getTeuchosComm() const
2063 {
2064  return _mdComm->getTeuchosComm();
2065 }
2066 
2068 
2069 template< class Node >
2070 int
2071 MDMap< Node >::numDims() const
2072 {
2073  return _mdComm->numDims();
2074 }
2075 
2077 
2078 template< class Node >
2079 Teuchos::Array< int >
2080 MDMap< Node >::getCommDims() const
2081 {
2082  return _mdComm->getCommDims();
2083 }
2084 
2086 
2087 template< class Node >
2088 int
2089 MDMap< Node >::getCommDim(int axis) const
2090 {
2091  return _mdComm->getCommDim(axis);
2092 }
2093 
2095 
2096 template< class Node >
2097 bool
2098 MDMap< Node >::isPeriodic(int axis) const
2099 {
2100  return _mdComm->isPeriodic(axis);
2101 }
2102 
2104 
2105 template< class Node >
2106 int
2107 MDMap< Node >::getCommIndex(int axis) const
2108 {
2109  return _mdComm->getCommIndex(axis);
2110 }
2111 
2113 
2114 template< class Node >
2115 int
2116 MDMap< Node >::getLowerNeighbor(int axis) const
2117 {
2118  return _mdComm->getLowerNeighbor(axis);
2119 }
2120 
2122 
2123 template< class Node >
2124 int
2125 MDMap< Node >::getUpperNeighbor(int axis) const
2126 {
2127  return _mdComm->getUpperNeighbor(axis);
2128 }
2129 
2131 
2132 template< class Node >
2133 Teuchos::Array< dim_type >
2134 MDMap< Node >::
2135 getGlobalDims() const
2136 {
2137  return _globalDims;
2138 }
2139 
2141 
2142 template< class Node >
2143 dim_type
2144 MDMap< Node >::
2145 getGlobalDim(int axis,
2146  bool withBndryPad) const
2147 {
2148 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2149  TEUCHOS_TEST_FOR_EXCEPTION(
2150  ((axis < 0) || (axis >= numDims())),
2151  RangeError,
2152  "invalid axis index = " << axis << " (number of dimensions = " <<
2153  numDims() << ")");
2154 #endif
2155  if (withBndryPad)
2156  return _globalDims[axis];
2157  else
2158  return _globalDims[axis] - _bndryPad[axis][0] - _bndryPad[axis][1];
2159 }
2160 
2162 
2163 template< class Node >
2164 Slice
2165 MDMap< Node >::
2166 getGlobalBounds(int axis,
2167                 bool withBndryPad) const
2168 {
2169 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2170  TEUCHOS_TEST_FOR_EXCEPTION(
2171  ((axis < 0) || (axis >= numDims())),
2172  RangeError,
2173  "invalid axis index = " << axis << " (number of dimensions = " <<
2174  numDims() << ")");
2175 #endif
2176  if (withBndryPad)
2177  return _globalBounds[axis];
2178  else
2179  {
2180  dim_type start = _globalBounds[axis].start() + _bndryPad[axis][0];
2181  dim_type stop = _globalBounds[axis].stop() - _bndryPad[axis][1];
2182  return ConcreteSlice(start, stop);
2183  }
2184 }
2185 
2187 
2188 template< class Node >
2189 dim_type
2190 MDMap< Node >::
2191 getLocalDim(int axis,
2192  bool withPad) const
2193 {
2194 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2195  TEUCHOS_TEST_FOR_EXCEPTION(
2196  ((axis < 0) || (axis >= numDims())),
2197  RangeError,
2198  "invalid axis index = " << axis << " (number of dimensions = " <<
2199  numDims() << ")");
2200 #endif
2201  if (withPad)
2202  return _localDims[axis];
2203  else
2204  return _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2205 }
2206 
2208 
2209 template< class Node >
2210 Teuchos::Array< dim_type >
2211 MDMap< Node >::
2212 getLocalDims() const
2213 {
2214  return _localDims;
2215 }
2216 
2218 
2219 template< class Node >
2220 Slice
2221 MDMap< Node >::
2222 getGlobalRankBounds(int axis,
2223                     bool withBndryPad) const
2224 {
2225 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2226  TEUCHOS_TEST_FOR_EXCEPTION(
2227  ((axis < 0) || (axis >= numDims())),
2228  RangeError,
2229  "invalid axis index = " << axis << " (number of dimensions = " <<
2230  numDims() << ")");
2231 #endif
2232  int axisRank = getCommIndex(axis);
2233  if (withBndryPad)
2234  {
2235  dim_type start = _globalRankBounds[axis][axisRank].start();
2236  dim_type stop = _globalRankBounds[axis][axisRank].stop();
2237  if (getCommIndex(axis) == 0)
2238  start -= _bndryPad[axis][0];
2239  if (getCommIndex(axis) == getCommDim(axis)-1)
2240  stop += _bndryPad[axis][1];
2241  return ConcreteSlice(start,stop);
2242  }
2243  else
2244  return _globalRankBounds[axis][axisRank];
2245 }
2246 
2248 
2249 template< class Node >
2250 Teuchos::ArrayView< const Slice >
2251 MDMap< Node >::
2252 getLocalBounds() const
2253 {
2254  return _localBounds();
2255 }
2256 
2258 
2259 template< class Node >
2260 Slice
2261 MDMap< Node >::
2262 getLocalBounds(int axis,
2263                bool withPad) const
2264 {
2265 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2266  TEUCHOS_TEST_FOR_EXCEPTION(
2267  ((axis < 0) || (axis >= numDims())),
2268  RangeError,
2269  "invalid axis index = " << axis << " (number of dimensions = " <<
2270  numDims() << ")");
2271 #endif
2272  if (withPad)
2273  return _localBounds[axis];
2274  else
2275  {
2276  dim_type start = _localBounds[axis].start() + _pad[axis][0];
2277  dim_type stop = _localBounds[axis].stop() - _pad[axis][1];
2278  return ConcreteSlice(start, stop);
2279  }
2280 }
2281 
2283 
2284 template< class Node >
2285 Slice
2286 MDMap< Node >::
2287 getLocalInteriorBounds(int axis) const
2288 {
2289 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2290  TEUCHOS_TEST_FOR_EXCEPTION(
2291  ((axis < 0) || (axis >= numDims())),
2292  RangeError,
2293  "invalid axis index = " << axis << " (number of dimensions = " <<
2294  numDims() << ")");
2295 #endif
2296  dim_type start = _localBounds[axis].start() + _pad[axis][0];
2297  dim_type stop = _localBounds[axis].stop() - _pad[axis][1];
2298  if (_mdComm->getLowerNeighbor(axis) == -1) ++start;
2299  if (_mdComm->getUpperNeighbor(axis) == -1) --stop;
2300  return ConcreteSlice(start, stop);
2301 }
2302 
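// Illustrative example for getLocalInteriorBounds() (assumed
// numbers): if the padded local bounds along an axis are [0,12) with
// one layer of padding on each side, getLocalBounds(axis, false)
// returns [1,11); getLocalInteriorBounds(axis) additionally trims one
// point on any side that has no neighbor, e.g. [2,11) on a process
// that touches the lower physical boundary.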
2304 
2305 template< class Node >
2306 bool
2307 MDMap< Node >::hasPadding() const
2308 {
2309  bool result = false;
2310  for (int axis = 0; axis < numDims(); ++axis)
2311  if (_pad[axis][0] + _pad[axis][1]) result = true;
2312  return result;
2313 }
2314 
2316 
2317 template< class Node >
2318 int
2319 MDMap< Node >::getLowerPadSize(int axis) const
2320 {
2321 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2322  TEUCHOS_TEST_FOR_EXCEPTION(
2323  ((axis < 0) || (axis >= numDims())),
2324  RangeError,
2325  "invalid axis index = " << axis << " (number of dimensions = " <<
2326  numDims() << ")");
2327 #endif
2328  return _pad[axis][0];
2329 }
2330 
2332 
2333 template< class Node >
2334 int
2335 MDMap< Node >::getUpperPadSize(int axis) const
2336 {
2337 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2338  TEUCHOS_TEST_FOR_EXCEPTION(
2339  ((axis < 0) || (axis >= numDims())),
2340  RangeError,
2341  "invalid axis index = " << axis << " (number of dimensions = " <<
2342  numDims() << ")");
2343 #endif
2344  return _pad[axis][1];
2345 }
2346 
2348 
2349 template< class Node >
2350 int
2351 MDMap< Node >::getCommPadSize(int axis) const
2352 {
2353 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2354  TEUCHOS_TEST_FOR_EXCEPTION(
2355  ((axis < 0) || (axis >= numDims())),
2356  RangeError,
2357  "invalid axis index = " << axis << " (number of dimensions = " <<
2358  numDims() << ")");
2359 #endif
2360  return _commPadSizes[axis];
2361 }
2362 
2364 
2365 template< class Node >
2366 int
2367 MDMap< Node >::getLowerBndryPad(int axis) const
2368 {
2369 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2370  TEUCHOS_TEST_FOR_EXCEPTION(
2371  ((axis < 0) || (axis >= numDims())),
2372  RangeError,
2373  "invalid axis index = " << axis << " (number of dimensions = " <<
2374  numDims() << ")");
2375 #endif
2376  return _bndryPad[axis][0];
2377 }
2378 
2380 
2381 template< class Node >
2382 int
2383 MDMap< Node >::getUpperBndryPad(int axis) const
2384 {
2385 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2386  TEUCHOS_TEST_FOR_EXCEPTION(
2387  ((axis < 0) || (axis >= numDims())),
2388  RangeError,
2389  "invalid axis index = " << axis << " (number of dimensions = " <<
2390  numDims() << ")");
2391 #endif
2392  return _bndryPad[axis][1];
2393 }
2394 
2396 
2397 template< class Node >
2398 Teuchos::Array< int >
2399 MDMap< Node >::getBndryPadSizes() const
2400 {
2401  return _bndryPadSizes;
2402 }
2403 
2405 
2406 template< class Node >
2407 int
2408 MDMap< Node >::getBndryPadSize(int axis) const
2409 {
2410 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2411  TEUCHOS_TEST_FOR_EXCEPTION(
2412  ((axis < 0) || (axis >= numDims())),
2413  RangeError,
2414  "invalid axis index = " << axis << " (number of dimensions = " <<
2415  numDims() << ")");
2416 #endif
2417  return _bndryPadSizes[axis];
2418 }
2419 
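// Illustrative usage sketch: sizing local storage.  The padded local
// dimension getLocalDim(axis,true) equals getLowerPadSize(axis) +
// getLocalDim(axis,false) + getUpperPadSize(axis), i.e. the extent a local
// buffer should be allocated with.  The name 'mdMap' is a placeholder for a
// valid MDMap.
//
//   Teuchos::Array< Domi::dim_type > dims(mdMap.numDims());
//   for (int axis = 0; axis < mdMap.numDims(); ++axis)
//     dims[axis] = mdMap.getLocalDim(axis, true);   // owned points + padding
//   Domi::MDArray< double > buffer(dims);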
2421 
2422 template< class Node >
2423 bool
2424 MDMap< Node >::
2425 isPad(const Teuchos::ArrayView< dim_type > & index) const
2426 {
2427  bool result = false;
2428  for (int axis = 0; axis < numDims(); ++axis)
2429  {
2430  if (index[axis] < getLowerPadSize(axis))
2431  result = true;
2432  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2433  result = true;
2434  }
2435  return result;
2436 }
2437 
2439 
2440 template< class Node >
2441 bool
2442 MDMap< Node >::
2443 isCommPad(const Teuchos::ArrayView< dim_type > & index) const
2444 {
2445  bool result = false;
2446  for (int axis = 0; axis < numDims(); ++axis)
2447  {
2448  // Check the ranks of the lower and upper neighbor processors. If
2449  // either of these values is non-negative, then we are on a
2450  // processor that contains communication padding
2451  if (getLowerNeighbor(axis) >= 0)
2452  {
2453  if (index[axis] < getLowerPadSize(axis))
2454  result = true;
2455  }
2456  if (getUpperNeighbor(axis) >= 0)
2457  {
2458  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2459  result = true;
2460  }
2461  }
2462  return result;
2463 }
2464 
2466 
2467 template< class Node >
2468 bool
2469 MDMap< Node >::
2470 isBndryPad(const Teuchos::ArrayView< dim_type > & index) const
2471 {
2472  bool result = false;
2473  for (int axis = 0; axis < numDims(); ++axis)
2474  {
2475  // Check the ranks of the lower and upper neighbor processors. If
2476  // either of these values is -1, then we are on a processor that
2477  // contains a boundary
2478  if (getLowerNeighbor(axis) == -1)
2479  {
2480  if (index[axis] < getLowerPadSize(axis))
2481  result = true;
2482  }
2483  if (getUpperNeighbor(axis) == -1)
2484  {
2485  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2486  result = true;
2487  }
2488  }
2489  return result;
2490 }
2491 
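// Illustrative usage sketch: classifying a local index.  isPad() is the
// union of the two more specific queries, since every point in the
// communication padding or the boundary padding is a pad point.  The name
// 'mdMap' is a placeholder for a valid two-dimensional MDMap.
//
//   Teuchos::Tuple< Domi::dim_type, 2 > index =
//     Teuchos::tuple< Domi::dim_type >(0, 0);
//   bool pad   = mdMap.isPad(index);       // comm pad or boundary pad
//   bool comm  = mdMap.isCommPad(index);   // ghost point received from a neighbor
//   bool bndry = mdMap.isBndryPad(index);  // point in the global boundary padding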
2493 
2494 template< class Node >
2495 bool
2496 MDMap< Node >::isReplicatedBoundary(int axis) const
2497 {
2498  return _mdComm->isPeriodic(axis) && bool(_replicatedBoundary[axis]);
2499 }
2500 
2502 
2503 template< class Node >
2504 Layout
2505 MDMap< Node >::getLayout() const
2506 {
2507  return _layout;
2508 }
2509 
2511 
2512 template< class Node >
2513 Teuchos::RCP< Node >
2514 MDMap< Node >::getNode() const
2515 {
2516  return _node;
2517 }
2518 
2520 
2521 template< class Node >
2522 Teuchos::ArrayView< Teuchos::RCP< const MDMap< Node > > >
2523 MDMap< Node >::getAxisMaps() const
2524 {
2525  if (_axisMaps.size() == 0) _axisMaps.resize(numDims());
2526  for (int axis = 0; axis < numDims(); ++axis)
2527  if (_axisMaps[axis].is_null()) getAxisMap(axis);
2528  return _axisMaps();
2529 }
2530 
2532 
2533 template< class Node >
2534 Teuchos::RCP< const MDMap< Node > >
2535 MDMap< Node >::getAxisMap(int axis) const
2536 {
2537 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2538  TEUCHOS_TEST_FOR_EXCEPTION(
2539  ((axis < 0) || (axis >= numDims())),
2540  RangeError,
2541  "invalid axis index = " << axis << " (number of dimensions = " <<
2542  numDims() << ")");
2543 #endif
2544  if (_axisMaps.size() == 0) _axisMaps.resize(numDims());
2545  if (_axisMaps[axis].is_null())
2546  {
2547  Teuchos::RCP< const MDComm > axisComm = _mdComm->getAxisComm(axis);
2548  Domi::dim_type axisDim = _globalDims[axis] - 2*_bndryPadSizes[axis];
2549  _axisMaps[axis] =
2550  Teuchos::rcp(new MDMap(axisComm,
2551  Teuchos::tuple(axisDim),
2552  Teuchos::tuple(_commPadSizes[axis]),
2553  Teuchos::tuple(_bndryPadSizes[axis]),
2554  Teuchos::tuple(_replicatedBoundary[axis]),
2555  _layout));
2556  }
2557  return _axisMaps[axis];
2558 }
2559 
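// Illustrative usage sketch: extracting the one-dimensional decomposition of
// a single axis.  The name 'mdMap' is a placeholder for an
// RCP< const MDMap<> > describing a multi-dimensional problem.
//
//   Teuchos::RCP< const Domi::MDMap<> > xMap = mdMap->getAxisMap(0);
//   // xMap is a 1-D MDMap with the same communicator decomposition,
//   // communication padding and boundary padding as axis 0 of *mdMap.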
2561 
2562 template< class Node >
2563 Teuchos::RCP< const MDMap< Node > >
2564 MDMap< Node >::getAugmentedMDMap(const dim_type leadingDim,
2565  const dim_type trailingDim) const
2566 {
2567  // Construct the new MDMap
2568  MDMap< Node > * newMdMap = new MDMap< Node >(*this);
2569 
2570  // Compute the number of new dimensions
2571  int newNumDims = 0;
2572  if (leadingDim > 0) ++newNumDims;
2573  if (trailingDim > 0) ++newNumDims;
2574 
2575  // Trivial result
2576  if (newNumDims == 0) return Teuchos::rcp(newMdMap);
2577 
2578  // Compute the new MDComm
2579  int oldNumDims = numDims();
2580  Teuchos::Array< int > newCommDims(oldNumDims);
2581  Teuchos::Array< int > newPeriodic(oldNumDims);
2582  for (int axis = 0; axis < oldNumDims; ++axis)
2583  {
2584  newCommDims[axis] = getCommDim(axis);
2585  newPeriodic[axis] = int(isPeriodic(axis));
2586  }
2587  if (leadingDim > 0)
2588  {
2589  newCommDims.insert(newCommDims.begin(),1);
2590  newPeriodic.insert(newPeriodic.begin(),0);
2591  }
2592  if (trailingDim > 0)
2593  {
2594  newCommDims.push_back(1);
2595  newPeriodic.push_back(0);
2596  }
2597  newMdMap->_mdComm = Teuchos::rcp(new MDComm(getTeuchosComm(),
2598  newCommDims,
2599  newPeriodic));
2600 
2601  // Adjust new MDMap arrays for a new leading dimension
2602  Slice slice = Slice(leadingDim);
2603  padding_type pad(Teuchos::tuple(0,0));
2604  if (leadingDim > 0)
2605  {
2606  newMdMap->_globalDims.insert(newMdMap->_globalDims.begin(), leadingDim);
2607  newMdMap->_globalBounds.insert(newMdMap->_globalBounds.begin(), slice);
2608  newMdMap->_globalRankBounds.insert(newMdMap->_globalRankBounds.begin(),
2609  Teuchos::Array< Slice >(1,slice));
2610  newMdMap->_localDims.insert(newMdMap->_localDims.begin(), leadingDim);
2611  newMdMap->_localBounds.insert(newMdMap->_localBounds.begin(), slice);
2612  newMdMap->_commPadSizes.insert(newMdMap->_commPadSizes.begin(),0);
2613  newMdMap->_pad.insert(newMdMap->_pad.begin(), pad);
2614  newMdMap->_bndryPadSizes.insert(newMdMap->_bndryPadSizes.begin(),0);
2615  newMdMap->_bndryPad.insert(newMdMap->_bndryPad.begin(), pad);
2616  }
2617 
2618  // Adjust new MDMap arrays for a new trailing dimension
2619  slice = Slice(trailingDim);
2620  if (trailingDim > 0)
2621  {
2622  newMdMap->_globalDims.push_back(trailingDim);
2623  newMdMap->_globalBounds.push_back(slice);
2624  newMdMap->_globalRankBounds.push_back(Teuchos::Array< Slice >(1,slice));
2625  newMdMap->_localDims.push_back(trailingDim);
2626  newMdMap->_localBounds.push_back(slice);
2627  newMdMap->_commPadSizes.push_back(0);
2628  newMdMap->_pad.push_back(pad);
2629  newMdMap->_bndryPadSizes.push_back(0);
2630  newMdMap->_bndryPad.push_back(pad);
2631  }
2632 
2633  // Compute the new stride related data
2634  newMdMap->_globalStrides =
2635  computeStrides< size_type, dim_type >(newMdMap->_globalDims,
2636  newMdMap->_layout);
2637  newMdMap->_localStrides =
2638  computeStrides< size_type, dim_type >(newMdMap->_localDims,
2639  newMdMap->_layout);
2640  newMdMap->_globalMin = 0;
2641  newMdMap->_globalMax = 0;
2642  newMdMap->_localMin = 0;
2643  newMdMap->_localMax = 0;
2644  for (int axis = 0; axis < oldNumDims + newNumDims; ++axis)
2645  {
2646  newMdMap->_globalMin += newMdMap->_globalBounds[axis].start() *
2647  newMdMap->_globalStrides[axis];
2648  newMdMap->_globalMax += newMdMap->_globalBounds[axis].stop() *
2649  newMdMap->_globalStrides[axis];
2650  newMdMap->_localMin += newMdMap->_localBounds[axis].start() *
2651  newMdMap->_localStrides[axis];
2652  newMdMap->_localMax += newMdMap->_localBounds[axis].stop() *
2653  newMdMap->_localStrides[axis];
2654  }
2655 
2656  // Return the result
2657  return Teuchos::rcp(newMdMap);
2658 }
2659 
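// Illustrative usage sketch: augmenting a scalar map so that every grid point
// carries a 3-vector of unknowns.  The new trailing dimension is not
// distributed (its commDim is 1).  The name 'mdMap' is a placeholder for an
// RCP< const MDMap<> >.
//
//   Teuchos::RCP< const Domi::MDMap<> > vectorMap =
//     mdMap->getAugmentedMDMap(0, 3);   // no leading dim, trailing dim of 3
//   // vectorMap->numDims() == mdMap->numDims() + 1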
2661 
2662 #ifdef HAVE_EPETRA
2663 
2664 template< class Node >
2665 Teuchos::RCP< const Epetra_Map >
2666 MDMap< Node >::getEpetraMap(bool withCommPad) const
2667 {
2668  if (withCommPad)
2669  {
2670  if (_epetraMap.is_null())
2671  {
2672  // Check if the maximum global ID is larger than what an int can
2673  // hold (because Epetra uses int ordinals)
2674  TEUCHOS_TEST_FOR_EXCEPTION(
2675  computeSize(_globalDims) - 1 > std::numeric_limits< int >::max(),
2676  MapOrdinalError,
2677  "The maximum global ID of this MDMap is too large for an Epetra_Map");
2678 
2679  // Allocate the myElements MDArray and the index array
2680  int num_dims = numDims();
2681  Teuchos::Array<dim_type> localDims(num_dims);
2682  for (int axis = 0; axis < num_dims; ++axis)
2683  localDims[axis] = _localDims[axis];
2684  MDArray<int> myElements(localDims);
2685  Teuchos::Array<int> index(num_dims);
2686 
2687  // Iterate over the local MDArray and assign global IDs
2688  for (MDArray<int>::iterator it = myElements.begin();
2689  it != myElements.end(); ++it)
2690  {
2691  int globalID = 0;
2692  for (int axis = 0; axis < num_dims; ++axis)
2693  {
2694  int axisRank = getCommIndex(axis);
2695  int start = _globalRankBounds[axis][axisRank].start() -
2696  _pad[axis][0];
2697  globalID += (start + it.index(axis)) * _globalStrides[axis];
2698  }
2699  *it = globalID;
2700  }
2701 
2702  // Construct the Epetra_Map
2703  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2704  _epetraMap = Teuchos::rcp(new Epetra_Map(-1,
2705  myElements.size(),
2706  myElements.getRawPtr(),
2707  0,
2708  *epetraComm));
2709  }
2710  return _epetraMap;
2711  }
2712  else
2713  {
2714  if (_epetraOwnMap.is_null())
2715  {
2716  // Check if the maximum global ID is larger than what an int can
2717  // hold (because Epetra uses int ordinals)
2718  if (computeSize(_globalDims) - 1 > std::numeric_limits< int >::max())
2719  throw MapOrdinalError("The maximum global ID of this MDMap is too "
2720  "large for an Epetra_Map");
2721 
2722  // Allocate the myElements MDArray and the index array
2723  int num_dims = numDims();
2724  Teuchos::Array<int> index(num_dims);
2725  Teuchos::Array<dim_type> myDims(num_dims);
2726  for (int axis = 0; axis < num_dims; ++axis)
2727  {
2728  myDims[axis] = _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2729  int axisRank = getCommIndex(axis);
2730  if (axisRank == 0)
2731  myDims[axis] += _bndryPad[axis][0];
2732  if (axisRank == getCommDim(axis)-1)
2733  myDims[axis] += _bndryPad[axis][1];
2734  }
2735  MDArray<int> myElements(myDims());
2736 
2737  // Iterate over the local MDArray and assign global IDs
2738  for (MDArray<int>::iterator it = myElements.begin();
2739  it != myElements.end(); ++it)
2740  {
2741  int globalID = 0;
2742  for (int axis = 0; axis < num_dims; ++axis)
2743  {
2744  int axisRank = getCommIndex(axis);
2745  int start = _globalRankBounds[axis][axisRank].start();
2746  if (axisRank == 0)
2747  start -= _bndryPad[axis][0];
2748  if (axisRank == getCommDim(axis)-1)
2749  start += _bndryPad[axis][1];
2750  globalID += (start + it.index(axis)) * _globalStrides[axis];
2751  }
  *it = globalID;  // store the computed global ID for this element
2752  }
2753 
2754  // Construct the Epetra_Map
2755  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2756  _epetraOwnMap = Teuchos::rcp(new Epetra_Map(-1,
2757  myElements.size(),
2758  myElements.getRawPtr(),
2759  0,
2760  *epetraComm));
2761  }
2762  return _epetraOwnMap;
2763  }
2764 }
2765 
2766 #endif
2767 
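// Illustrative usage sketch (requires HAVE_EPETRA): flattening the MDMap into
// a one-dimensional Epetra_Map.  Passing true produces the overlapping map
// that includes the communication padding; passing false produces the
// uniquely-owned map.  The name 'mdMap' is a placeholder for an
// RCP< const MDMap<> >.
//
//   Teuchos::RCP< const Epetra_Map > overlapMap = mdMap->getEpetraMap(true);
//   Teuchos::RCP< const Epetra_Map > ownedMap   = mdMap->getEpetraMap(false);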
2769 
2770 #ifdef HAVE_EPETRA
2771 
2772 template< class Node >
2773 Teuchos::RCP< const Epetra_Map >
2774 MDMap< Node >::
2775 getEpetraAxisMap(int axis,
2776  bool withCommPad) const
2777 {
2778 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2779  TEUCHOS_TEST_FOR_EXCEPTION(
2780  ((axis < 0) || (axis >= numDims())),
2781  RangeError,
2782  "invalid axis index = " << axis << " (number of dimensions = " <<
2783  numDims() << ")");
2784 #endif
2785  if ((withCommPad && (_epetraAxisMaps.size() == 0)) ||
2786  (not withCommPad && (_epetraAxisOwnMaps.size() == 0)))
2787  {
2788  int num_dims = numDims();
2789  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2790  for (int axis=0; axis < num_dims; ++axis)
2791  {
2792  Teuchos::Array<int> elements(getLocalDim(axis, withCommPad));
2793  int start = getGlobalRankBounds(axis,true).start();
2794  if (withCommPad && (getCommIndex(axis) != 0)) start -= _pad[axis][0];
2795  for (int i = 0; i < elements.size(); ++i)
2796  elements[i] = i + start;
2797  if (withCommPad)
2798  {
2799  _epetraAxisMaps.push_back(
2800  Teuchos::rcp(new Epetra_Map(-1,
2801  elements.size(),
2802  elements.getRawPtr(),
2803  0,
2804  *epetraComm)));
2805  }
2806  else
2807  {
2808  _epetraAxisOwnMaps.push_back(
2809  Teuchos::rcp(new Epetra_Map(-1,
2810  elements.size(),
2811  elements.getRawPtr(),
2812  0,
2813  *epetraComm)));
2814  }
2815  }
2816  }
2817 
2818  if (withCommPad)
2819  return _epetraAxisMaps[axis];
2820  else
2821  return _epetraAxisOwnMaps[axis];
2822 }
2823 
2824 #endif
2825 
2827 
2828 #ifdef HAVE_TPETRA
2829 
2830 template< class Node >
2831 template< class LocalOrdinal,
2832  class GlobalOrdinal,
2833  class Node2 >
2834 Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
2835 MDMap< Node >::getTpetraMap(bool withCommPad) const
2836 {
2837  if (withCommPad)
2838  {
2839  // Allocate the elementsMDArray and the index array
2840  int num_dims = numDims();
2841  Teuchos::Array<dim_type> localDims(num_dims);
2842  for (int axis = 0; axis < num_dims; ++axis)
2843  localDims[axis] = _localDims[axis];
2844  MDArray< GlobalOrdinal > elementMDArray(localDims);
2845  Teuchos::Array< LocalOrdinal > index(num_dims);
2846 
2847  // Iterate over the local MDArray and assign global IDs
2848  for (typename MDArray< GlobalOrdinal >::iterator it = elementMDArray.begin();
2849  it != elementMDArray.end(); ++it)
2850  {
2851  GlobalOrdinal globalID = 0;
2852  for (int axis = 0; axis < num_dims; ++axis)
2853  {
2854  int axisRank = getCommIndex(axis);
2855  GlobalOrdinal start = _globalRankBounds[axis][axisRank].start() -
2856  _pad[axis][0];
2857  globalID += (start + it.index(axis)) * _globalStrides[axis];
2858  }
2859  *it = globalID;
2860  }
2861 
2862  // Return the Tpetra::Map
2863  const Teuchos::Array< GlobalOrdinal > & myElements =
2864  elementMDArray.array();
2865  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2866  _mdComm->getTeuchosComm();
2867  return
2868  Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2869  GlobalOrdinal,
2870  Node2 >(Teuchos::OrdinalTraits<
2871  Tpetra::global_size_t>::invalid(),
2872  myElements(),
2873  0,
2874  teuchosComm));
2875  }
2876  else
2877  {
2878  // Allocate the elementMDArray MDArray and the index array
2879  int num_dims = numDims();
2880  Teuchos::Array< LocalOrdinal > index(num_dims);
2881  Teuchos::Array< dim_type > myDims(num_dims);
2882  for (int axis = 0; axis < num_dims; ++axis)
2883  {
2884  myDims[axis] =
2885  _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2886  int axisRank = getCommIndex(axis);
2887  if (axisRank == 0)
2888  myDims[axis] += _bndryPad[axis][0];
2889  if (axisRank == getCommDim(axis)-1)
2890  myDims[axis] += _bndryPad[axis][1];
2891  }
2892  MDArray< GlobalOrdinal > elementMDArray(myDims());
2893 
2894  // Iterate over the local MDArray and assign global IDs
2895  for (typename MDArray< GlobalOrdinal >::iterator it = elementMDArray.begin();
2896  it != elementMDArray.end(); ++it)
2897  {
2898  GlobalOrdinal globalID = 0;
2899  for (int axis = 0; axis < num_dims; ++axis)
2900  {
2901  int axisRank = getCommIndex(axis);
2902  GlobalOrdinal start = _globalRankBounds[axis][axisRank].start();
2903  if (axisRank == 0)
2904  start -= _bndryPad[axis][0];
2905  if (axisRank == getCommDim(axis)-1)
2906  start += _bndryPad[axis][1];
2907  globalID += (start + it.index(axis)) * _globalStrides[axis];
2908  }
  *it = globalID;  // store the computed global ID for this element
2909  }
2910 
2911  // Return the Tpetra::Map
2912  const Teuchos::Array< GlobalOrdinal> & myElements =
2913  elementMDArray.array();
2914  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2915  _mdComm->getTeuchosComm();
2916  return
2917  Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2918  GlobalOrdinal,
2919  Node2 >(Teuchos::OrdinalTraits<
2920  Tpetra::global_size_t>::invalid(),
2921  myElements(),
2922  0,
2923  teuchosComm));
2924  }
2925 }
2926 #endif
2927 
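// Illustrative usage sketch (requires HAVE_TPETRA): the conversion is
// templated on the Tpetra ordinal and node types, which must be supplied at
// the call site.  The typedefs below and the name 'mdMap' are placeholders.
//
//   typedef Tpetra::Map<>::local_ordinal_type  LO;
//   typedef Tpetra::Map<>::global_ordinal_type GO;
//   typedef Tpetra::Map<>::node_type           NT;
//   Teuchos::RCP< const Tpetra::Map< LO, GO, NT > > tpetraMap =
//     mdMap->getTpetraMap< LO, GO, NT >(true);   // include comm padding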
2929 
2930 #ifdef HAVE_TPETRA
2931 
2932 template< class Node >
2933 template< class LocalOrdinal,
2934  class GlobalOrdinal,
2935  class Node2 >
2936 Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
2937 MDMap< Node >::
2938 getTpetraAxisMap(int axis,
2939  bool withCommPad) const
2940 {
2941 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2942  TEUCHOS_TEST_FOR_EXCEPTION(
2943  ((axis < 0) || (axis >= numDims())),
2944  RangeError,
2945  "invalid axis index = " << axis << " (number of dimensions = " <<
2946  numDims() << ")");
2947 #endif
2948  int num_dims = numDims();
2949  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2950  _mdComm->getTeuchosComm();
2951  Teuchos::Array< GlobalOrdinal > elements(getLocalDim(axis,withCommPad));
2952  GlobalOrdinal start = getGlobalRankBounds(axis,true).start();
2953  if (withCommPad && (getCommIndex(axis) != 0)) start -= _pad[axis][0];
2954  for (LocalOrdinal i = 0; i < elements.size(); ++i)
2955  elements[i] = i + start;
2956  return Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2957  GlobalOrdinal,
2958  Node2 >(Teuchos::OrdinalTraits<
2959  Tpetra::global_size_t>::invalid(),
2960  elements,
2961  0,
2962  teuchosComm));
2963 }
2964 #endif
2965 
2967 
2968 template< class Node >
2969 Teuchos::Array< dim_type >
2970 MDMap< Node >::
2971 getGlobalIndex(size_type globalID) const
2972 {
2973 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2974  TEUCHOS_TEST_FOR_EXCEPTION(
2975  ((globalID < _globalMin) || (globalID >= _globalMax)),
2976  RangeError,
2977  "invalid global index = " << globalID << " (should be between " <<
2978  _globalMin << " and " << _globalMax << ")");
2979 #endif
2980  int num_dims = numDims();
2981  Teuchos::Array< dim_type > result(num_dims);
2982  size_type index = globalID;
2983  if (_layout == LAST_INDEX_FASTEST)
2984  {
2985  for (int axis = 0; axis < num_dims-1; ++axis)
2986  {
2987  result[axis] = index / _globalStrides[axis];
2988  index = index % _globalStrides[axis];
2989  }
2990  result[num_dims-1] = index;
2991  }
2992  else
2993  {
2994  for (int axis = num_dims-1; axis > 0; --axis)
2995  {
2996  result[axis] = index / _globalStrides[axis];
2997  index = index % _globalStrides[axis];
2998  }
2999  result[0] = index;
3000  }
3001  return result;
3002 }
3003 
3005 
3006 template< class Node >
3007 Teuchos::Array< dim_type >
3008 MDMap< Node >::
3009 getLocalIndex(size_type localID) const
3010 {
3011 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3012  TEUCHOS_TEST_FOR_EXCEPTION(
3013  ((localID < _localMin) || (localID >= _localMax)),
3014  RangeError,
3015  "invalid local index = " << localID << " (should be between " <<
3016  _localMin << " and " << _localMax << ")");
3017 #endif
3018  int num_dims = numDims();
3019  Teuchos::Array< dim_type > result(num_dims);
3020  size_type index = localID;
3021  if (_layout == LAST_INDEX_FASTEST)
3022  {
3023  for (int axis = 0; axis < num_dims-1; ++axis)
3024  {
3025  result[axis] = index / _localStrides[axis];
3026  index = index % _localStrides[axis];
3027  }
3028  result[num_dims-1] = index;
3029  }
3030  else
3031  {
3032  for (int axis = num_dims-1; axis > 0; --axis)
3033  {
3034  result[axis] = index / _localStrides[axis];
3035  index = index % _localStrides[axis];
3036  }
3037  result[0] = index;
3038  }
3039  return result;
3040 }
3041 
3043 
3044 template< class Node >
3045 size_type
3046 MDMap< Node >::
3047 getGlobalID(size_type localID) const
3048 {
3049 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3050  TEUCHOS_TEST_FOR_EXCEPTION(
3051  ((localID < 0) || (localID >= _localMax)),
3052  RangeError,
3053  "invalid local index = " << localID << " (local size = " <<
3054  _localMax << ")");
3055 #endif
3056  Teuchos::Array< dim_type > localIndex = getLocalIndex(localID);
3057  size_type result = 0;
3058  for (int axis = 0; axis < numDims(); ++axis)
3059  {
3060  dim_type globalIndex = localIndex[axis] +
3061  _globalRankBounds[axis][getCommIndex(axis)].start() - _pad[axis][0];
3062  result += globalIndex * _globalStrides[axis];
3063  }
3064  return result;
3065 }
3066 
3068 
3069 template< class Node >
3070 size_type
3071 MDMap< Node >::
3072 getGlobalID(const Teuchos::ArrayView< dim_type > & globalIndex) const
3073 {
3074 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3075  TEUCHOS_TEST_FOR_EXCEPTION(
3076  (globalIndex.size() != numDims()),
3077  InvalidArgument,
3078  "globalIndex has " << globalIndex.size() << " entries; expecting "
3079  << numDims());
3080  for (int axis = 0; axis < numDims(); ++axis)
3081  {
3082  TEUCHOS_TEST_FOR_EXCEPTION(
3083  ((globalIndex[axis] < 0) ||
3084  (globalIndex[axis] >= _globalDims[axis])),
3085  RangeError,
3086  "invalid globalIndex[" << axis << "] = " << globalIndex[axis] <<
3087  " (global dimension = " << _globalDims[axis] << ")");
3088  }
3089 #endif
3090  size_type result = 0;
3091  for (int axis = 0; axis < numDims(); ++axis)
3092  result += globalIndex[axis] * _globalStrides[axis];
3093  return result;
3094 }
3095 
3097 
3098 template< class Node >
3099 size_type
3100 MDMap< Node >::
3101 getLocalID(size_type globalID) const
3102 {
3103 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3104  TEUCHOS_TEST_FOR_EXCEPTION(
3105  ((globalID < _globalMin) || (globalID >= _globalMax)),
3106  RangeError,
3107  "invalid global index = " << globalID << " (should be between " <<
3108  _globalMin << " and " << _globalMax << ")");
3109 #endif
3110  Teuchos::Array< dim_type > globalIndex =
3111  getGlobalIndex(globalID);
3112  size_type result = 0;
3113  for (int axis = 0; axis < numDims(); ++axis)
3114  {
3115  dim_type localIndex = globalIndex[axis] -
3116  _globalRankBounds[axis][getCommIndex(axis)].start() + _pad[axis][0];
3117  TEUCHOS_TEST_FOR_EXCEPTION(
3118  (localIndex < 0 || localIndex >= _localDims[axis]),
3119  RangeError,
3120  "global index not on local processor")
3121  result += localIndex * _localStrides[axis];
3122  }
3123  return result;
3124 }
3125 
3127 
3128 template< class Node >
3129 size_type
3130 MDMap< Node >::
3131 getLocalID(const Teuchos::ArrayView< dim_type > & localIndex) const
3132 {
3133 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3134  TEUCHOS_TEST_FOR_EXCEPTION(
3135  (localIndex.size() != numDims()),
3136  InvalidArgument,
3137  "localIndex has " << localIndex.size() << " entries; expecting "
3138  << numDims());
3139  for (int axis = 0; axis < numDims(); ++axis)
3140  {
3141  TEUCHOS_TEST_FOR_EXCEPTION(
3142  ((localIndex[axis] < 0) ||
3143  (localIndex[axis] >= _localDims[axis])),
3144  RangeError,
3145  "invalid localIndex[" << axis << "] = " << localIndex[axis] <<
3146  " (local dimension = " << _localDims[axis] << ")");
3147  }
3148 #endif
3149  size_type result = 0;
3150  for (int axis = 0; axis < numDims(); ++axis)
3151  result += localIndex[axis] * _localStrides[axis];
3152  return result;
3153 }
3154 
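// Illustrative usage sketch: round-tripping between flat IDs and
// multi-dimensional indexes.  The name 'mdMap' is a placeholder for a valid
// MDMap.
//
//   Domi::size_type lid = 0;                             // first local element
//   Teuchos::Array< Domi::dim_type > lIndex = mdMap.getLocalIndex(lid);
//   Domi::size_type gid = mdMap.getGlobalID(lid);        // flat global ID
//   Teuchos::Array< Domi::dim_type > gIndex = mdMap.getGlobalIndex(gid);
//   Domi::size_type lid2 = mdMap.getLocalID(gid);        // == lid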
3156 
3157 template< class Node >
3158 bool
3159 MDMap< Node >::isCompatible(const MDMap< Node > & mdMap) const
3160 {
3161  // Trivial comparison. We assume that if the object pointers match
3162  // on this processor, then they match on all processors
3163  if (this == &mdMap) return true;
3164 
3165  // Check the number of dimensions. Assume this check produces the
3166  // same result on all processors
3167  int num_dims = numDims();
3168  if (num_dims != mdMap.numDims()) return false;
3169 
3170  // Check the commDims. Assume this check produces the same result
3171  // on all processes
3172  for (int axis = 0; axis < num_dims; ++axis)
3173  if (getCommDim(axis) != mdMap.getCommDim(axis)) return false;
3174 
3175  // Check the global dimensions. Assume this check produces the same
3176  // result on all processes
3177  if (_globalDims != mdMap._globalDims) return false;
3178 
3179  // Check the local dimensions. This needs to be checked locally on
3180  // each processor and then the results communicated to obtain the
3181  // global result
3182  int localResult = 1;
3183  int globalResult = 1;
3184  for (int axis = 0; axis < num_dims; ++axis)
3185  if (getLocalDim(axis,false) != mdMap.getLocalDim(axis,false))
3186  localResult = 0;
3187  Teuchos::reduceAll(*(getTeuchosComm()),
3188  Teuchos::REDUCE_MIN,
3189  1,
3190  &localResult,
3191  &globalResult);
3192 
3193  // Return the result
3194  return bool(globalResult);
3195 }
3196 
3198 
3199 template< class Node >
3200 bool
3201 MDMap< Node >::isSameAs(const MDMap< Node > & mdMap,
3202  const int verbose) const
3203 {
3204  // Trivial comparison. We assume that if the object pointers match
3205  // on this processor, then they match on all processors
3206  if (this == &mdMap) return true;
3207 
3208  // Start by setting a local result to true. We will perform a
3209  // number of tests, and if they fail, the local result will be set
3210  // to false. At the end, we will perform a global reduction to
3211  // obtain the global result.
3212  int localResult = 1;
3213  Teuchos::RCP< const Teuchos::Comm< int > > comm = getTeuchosComm();
3214  int rank = comm->getRank();
3215 
3216  // Check if MDMaps are compatible.
3217  if (! isCompatible(mdMap))
3218  {
3219  localResult = 0;
3220  if (verbose)
3221  std::cout << rank << ": MDMaps are incompatible" << std::endl;
3222  }
3223 
3224  // Check that underlying communicators are the same size
3225  if (comm->getSize() != mdMap.getTeuchosComm()->getSize())
3226  {
3227  localResult = 0;
3228  if (verbose)
3229  std::cout << rank << ": this Comm size = " << comm->getSize() << " != "
3230  << mdMap.getTeuchosComm()->getSize() << std::endl;
3231  }
3232 
3233  // Check that underlying communicators have the same rank
3234  if (rank != mdMap.getTeuchosComm()->getRank())
3235  {
3236  localResult = 0;
3237  if (verbose)
3238  std::cout << rank << ": this Comm rank = " << rank << " != "
3239  << mdMap.getTeuchosComm()->getRank() << std::endl;
3240  }
3241 
3242  // Check the global bounds.
3243  if (_globalBounds != mdMap._globalBounds)
3244  {
3245  localResult = 0;
3246  if (verbose)
3247  std::cout << rank << ": global bounds " << _globalBounds << " != "
3248  << mdMap._globalBounds << std::endl;
3249  }
3250 
3251  // Check the local dimensions.
3252  if (_localDims != mdMap._localDims)
3253  {
3254  localResult = 0;
3255  if (verbose)
3256  std::cout << rank << ": local dimensions " << _localDims << " != "
3257  << mdMap._localDims << std::endl;
3258  }
3259 
3260  // Obtain the global result
3261  int globalResult = 1;
3262  Teuchos::reduceAll(*(getTeuchosComm()),
3263  Teuchos::REDUCE_MIN,
3264  1,
3265  &localResult,
3266  &globalResult);
3267 
3268  // Return the result
3269  return bool(globalResult);
3270 }
3271 
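// Illustrative usage sketch: isCompatible() checks the number of dimensions,
// the processor decomposition, and the global and local dimensions, while
// isSameAs() additionally requires matching communicator size and rank,
// global bounds, and local (padded) dimensions.  The names 'mapA' and 'mapB'
// are placeholders.
//
//   if (mapA.isCompatible(mapB))
//   {
//     // the two maps have the same shape and decomposition
//   }
//   if (mapA.isSameAs(mapB, 1))   // verbose = 1: print differences per rank
//   {
//     // the two maps describe identical distributions
//   }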
3273 
3274 template< class Node >
3275 bool
3276 MDMap< Node >::isContiguous() const
3277 {
3278  // Compute the local strides if they were contiguous
3279  Teuchos::Array< size_type > contiguousStrides =
3280  computeStrides< size_type, dim_type >(_localDims, _layout);
3281 
3282  // Compute the local result: 0 = contiguous, 1 = non-contiguous
3283  int localResult = int(_localStrides != contiguousStrides);
3284 
3285  // Compute the global result
3286  int globalResult = 0;
3287  Teuchos::reduceAll(*(_mdComm->getTeuchosComm()),
3288  Teuchos::REDUCE_SUM,
3289  1,
3290  &localResult,
3291  &globalResult);
3292  return (globalResult == 0);
3293 }
3294 
3296 
3297 template< class Node >
3298 void
3299 MDMap< Node >::computeBounds()
3300 {
3301  // Initialization
3302  int num_dims = numDims();
3303 
3304  // Decompose the multi-dimensional domain
3305  for (int axis = 0; axis < num_dims; ++axis)
3306  {
3307  // Get the communicator info for this axis
3308  int commDim = getCommDim(axis);
3309  for (int axisRank = 0; axisRank < commDim; ++axisRank)
3310  {
3311  // First estimates assuming even division of global dimensions
3312  // by the number of processors along this axis, and ignoring
3313  // communication and boundary padding.
3314  dim_type localDim = (_globalDims[axis] - _bndryPad[axis][0] -
3315  _bndryPad[axis][1]) / commDim;
3316  dim_type axisStart = axisRank * localDim;
3317 
3318  // Adjustments for non-zero remainder. Compute the remainder
3319  // using the mod operator. If the remainder is > 0, then add an
3320  // element to the appropriate number of processors with the
3321  // highest axis ranks. Note that this is the opposite of the
3322  standard Tpetra::Map constructor (which adds an element to
3323  // the lowest processor ranks), and provides better balance for
3324  // finite differencing systems with staggered data location.
3325  dim_type remainder = (_globalDims[axis] - _bndryPad[axis][0] -
3326  _bndryPad[axis][1]) % commDim;
3327  if (commDim - axisRank - 1 < remainder)
3328  {
3329  ++localDim;
3330  axisStart += (remainder - commDim + axisRank);
3331  }
3332 
3333  // Global adjustment for boundary padding
3334  axisStart += _bndryPad[axis][0];
3335 
3336  // Compute and store the global axis bounds
3337  _globalRankBounds[axis].push_back(
3338  ConcreteSlice(axisStart, axisStart + localDim));
3339 
3340  // Set _localDims[axis] and _localBounds[axis] only if
3341  // axisRank equals the axis rank of this processor
3342  if (axisRank == getCommIndex(axis))
3343  {
3344  // Local adjustment for padding. Note that _pad should
3345  // already be corrected to be either the communication padding
3346  // or boundary padding as appropriate
3347  _localDims[axis] = localDim + _pad[axis][0] + _pad[axis][1];
3348 
3349  // Compute and store the axis bounds
3350  _localBounds.push_back(ConcreteSlice(_localDims[axis]));
3351  }
3352  }
3353  }
3354 }
3355 
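// Worked example of the decomposition performed above (illustrative numbers):
// 10 interior points distributed over commDim = 4 processors gives
// localDim = 10/4 = 2 with remainder = 10%4 = 2, so the two highest axis
// ranks each receive one extra point:
//
//   axisRank 0: [0, 2)    axisRank 1: [2, 4)
//   axisRank 2: [4, 7)    axisRank 3: [7, 10)
//
// Any boundary padding then shifts each of these bounds up by
// _bndryPad[axis][0].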
3356 }
3357 
3358 #endif