Domi
Multi-dimensional, distributed data structures
Domi_MDMap.hpp
1 // @HEADER
2 // ***********************************************************************
3 //
4 // Domi: Multi-dimensional Distributed Linear Algebra Services
5 // Copyright (2014) Sandia Corporation
6 //
7 // Under the terms of Contract DE-AC04-94AL85000 with Sandia
8 // Corporation, the U.S. Government retains certain rights in this
9 // software.
10 //
11 // Redistribution and use in source and binary forms, with or without
12 // modification, are permitted provided that the following conditions are
13 // met:
14 //
15 // 1. Redistributions of source code must retain the above copyright
16 // notice, this list of conditions and the following disclaimer.
17 //
18 // 2. Redistributions in binary form must reproduce the above copyright
19 // notice, this list of conditions and the following disclaimer in the
20 // documentation and/or other materials provided with the distribution.
21 //
22 // 3. Neither the name of the Corporation nor the names of the
23 // contributors may be used to endorse or promote products derived from
24 // this software without specific prior written permission.
25 //
26 // THIS SOFTWARE IS PROVIDED BY SANDIA CORPORATION "AS IS" AND ANY
27 // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
28 // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
29 // PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL SANDIA CORPORATION OR THE
30 // CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
31 // EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
32 // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
33 // PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
34 // LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
35 // NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
36 // SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37 //
38 // Questions? Contact William F. Spotz (wfspotz@sandia.gov)
39 //
40 // ***********************************************************************
41 // @HEADER
42 
43 #ifndef DOMI_MDMAP_HPP
44 #define DOMI_MDMAP_HPP
45 
46 // System includes
47 #include <algorithm>
48 #include <limits>
49 #include <sstream>
50 
51 // Domi includes
52 #include "Domi_ConfigDefs.hpp"
53 #include "Domi_DefaultNode.hpp"
54 #include "Domi_Utils.hpp"
55 #include "Domi_getValidParameters.hpp"
56 #include "Domi_Slice.hpp"
57 #include "Domi_MDComm.hpp"
58 #include "Domi_MDArray.hpp"
59 
60 // Teuchos includes
61 #include "Teuchos_Comm.hpp"
62 #include "Teuchos_DefaultComm.hpp"
63 #include "Teuchos_CommHelpers.hpp"
64 #include "Teuchos_Tuple.hpp"
65 
66 // Kokkos includes
67 #include "Kokkos_Core.hpp"
68 
69 #ifdef HAVE_EPETRA
70 #include "Epetra_Map.h"
71 #endif
72 
73 #ifdef HAVE_TPETRA
74 #include "Tpetra_Map.hpp"
75 #include "Tpetra_Util.hpp"
76 #endif
77 
78 namespace Domi
79 {
80 
143 template< class Node = DefaultNode::DefaultNodeType >
144 class MDMap
145 {
146 public:
147 
150 
184  MDMap(const Teuchos::RCP< const MDComm > mdComm,
185  const Teuchos::ArrayView< const dim_type > & dimensions,
186  const Teuchos::ArrayView< const int > & commPad =
187  Teuchos::ArrayView< const int >(),
188  const Teuchos::ArrayView< const int > & bndryPad =
189  Teuchos::ArrayView< const int >(),
190  const Teuchos::ArrayView< const int > & replicatedBoundary =
191  Teuchos::ArrayView< const int >(),
192  const Layout layout = DEFAULT_ORDER);
193 
205  MDMap(Teuchos::ParameterList & plist);
206 
219  MDMap(const Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
220  Teuchos::ParameterList & plist);
221 
234  MDMap(const Teuchos::RCP< const MDComm > mdComm,
235  Teuchos::ParameterList & plist);
236 
266  MDMap(const Teuchos::RCP< const MDComm > mdComm,
267  const Teuchos::ArrayView< Slice > & myGlobalBounds,
268  const Teuchos::ArrayView< padding_type > & padding =
269  Teuchos::ArrayView< padding_type >(),
270  const Teuchos::ArrayView< const int > & replicatedBoundary =
271  Teuchos::ArrayView< const int >(),
272  const Layout layout = DEFAULT_ORDER);
273 
278  MDMap(const MDMap< Node > & source);
279 
293  MDMap(const MDMap< Node > & parent,
294  int axis,
295  dim_type index);
296 
318  MDMap(const MDMap< Node > & parent,
319  int axis,
320  const Slice & slice,
321  int bndryPad = 0);
322 
336  MDMap(const MDMap< Node > & parent,
337  const Teuchos::ArrayView< Slice > & slices,
338  const Teuchos::ArrayView< int > & bndryPad =
339  Teuchos::ArrayView< int >());
340 
343  ~MDMap();
344 
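  // Editorial example (not part of the original header): a minimal usage
  // sketch of the first constructor above. It assumes 'mdComm' is an
  // existing Teuchos::RCP< const Domi::MDComm > distributed over two axes;
  // the grid size and pad widths are illustrative only.
  //
  //   Domi::MDMap<> mdMap(mdComm,
  //                       Teuchos::tuple< Domi::dim_type >(100, 80),
  //                       Teuchos::tuple(1, 1),    // communication pad
  //                       Teuchos::tuple(2, 2));   // boundary pad
  //
  //   // For non-periodic axes the boundary padding is added to both ends,
  //   // so getGlobalDim(0) == 100 while getGlobalDim(0, true) == 104.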
346 
349 
354  MDMap< Node > & operator=(const MDMap< Node > & source);
355 
357 
360 
366  inline Teuchos::RCP< const MDComm > getMDComm() const;
367 
374  inline bool onSubcommunicator() const;
375 
382  inline Teuchos::RCP< const Teuchos::Comm< int > > getTeuchosComm() const;
383 
390  inline int numDims() const;
391 
398  inline Teuchos::Array< int > getCommDims() const;
399 
409  inline int getCommDim(int axis) const;
410 
420  inline bool isPeriodic(int axis) const;
421 
431  inline int getCommIndex(int axis) const;
432 
449  inline int getLowerNeighbor(int axis) const;
450 
468  inline int getUpperNeighbor(int axis) const;
469 
471 
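  // Editorial example: getLowerNeighbor()/getUpperNeighbor() return the
  // rank of the neighboring processor along an axis, or -1 when there is
  // no neighbor in that direction (i.e. this processor touches a
  // non-periodic domain boundary). 'mdMap' is assumed from the sketch
  // above.
  //
  //   for (int axis = 0; axis < mdMap.numDims(); ++axis)
  //   {
  //     bool onLowerDomainBndry = (mdMap.getLowerNeighbor(axis) == -1);
  //     bool onUpperDomainBndry = (mdMap.getUpperNeighbor(axis) == -1);
  //   }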
474 
478  Teuchos::Array< dim_type > getGlobalDims() const;
479 
488  dim_type getGlobalDim(int axis,
489  bool withBndryPad=false) const;
490 
499  Slice getGlobalBounds(int axis,
500  bool withBndryPad=false) const;
501 
515  Slice getGlobalRankBounds(int axis,
516  bool withBndryPad=false) const;
517 
520  Teuchos::Array< dim_type > getLocalDims() const;
521 
530  dim_type getLocalDim(int axis,
531  bool withPad=false) const;
532 
540  Teuchos::ArrayView< const Slice > getLocalBounds() const;
541 
555  Slice getLocalBounds(int axis,
556  bool withPad=false) const;
557 
575  Slice getLocalInteriorBounds(int axis) const;
576 
578 
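  // Editorial example: loop over the locally owned (unpadded) points of a
  // 2-D map using the local loop bounds; 'mdMap' as in the sketches above.
  //
  //   Domi::Slice iBounds = mdMap.getLocalBounds(0);   // excludes padding
  //   Domi::Slice jBounds = mdMap.getLocalBounds(1);
  //   for (Domi::dim_type j = jBounds.start(); j < jBounds.stop(); ++j)
  //     for (Domi::dim_type i = iBounds.start(); i < iBounds.stop(); ++i)
  //     {
  //       // (i,j) is a local index owned by this processor
  //     }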
581 
592  bool hasPadding() const;
593 
602  int getLowerPadSize(int axis) const;
603 
612  int getUpperPadSize(int axis) const;
613 
623  int getCommPadSize(int axis) const;
624 
631  int getLowerBndryPad(int axis) const;
632 
639  int getUpperBndryPad(int axis) const;
640 
647  Teuchos::Array< int > getBndryPadSizes() const;
648 
658  int getBndryPadSize(int axis) const;
659 
 660  /** \brief Return whether the given local index is in the padding
661  *
662  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
663  * (i,j,k) for 3D, etc)
664  */
665  bool isPad(const Teuchos::ArrayView< dim_type > & index) const;
666 
 667  /** \brief Return whether the given local index is in the communication
668  * padding
669  *
670  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
671  * (i,j,k) for 3D, etc)
672  */
673  bool isCommPad(const Teuchos::ArrayView< dim_type > & index) const;
674 
 675  /** \brief Return whether the given local index is in the boundary
676  * padding
677  *
678  * \param index [in] An array of indexes ((i) for 1D, (i,j) for 2D,
679  * (i,j,k) for 3D, etc)
680  */
681  bool isBndryPad(const Teuchos::ArrayView< dim_type > & index) const;
682 
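  // Editorial example: classify a local (i,j) index on a 2-D map; the
  // index values are illustrative.
  //
  //   Teuchos::Tuple< Domi::dim_type, 2 > index =
  //     Teuchos::tuple< Domi::dim_type >(0, 5);
  //   bool inPad      = mdMap.isPad(index);       // any padding
  //   bool inCommPad  = mdMap.isCommPad(index);   // communication padding
  //   bool inBndryPad = mdMap.isBndryPad(index);  // boundary padding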
685  bool isReplicatedBoundary(int axis) const;
686 
689  Layout getLayout() const;
690 
693  Teuchos::RCP< Node > getNode() const;
694 
696 
699 
704  Teuchos::ArrayView< Teuchos::RCP< const Domi::MDMap< Node > > >
705  getAxisMaps() const;
706 
711  Teuchos::RCP< const Domi::MDMap< Node > > getAxisMap(int axis) const;
712 
729  Teuchos::RCP< const MDMap< Node > >
730  getAugmentedMDMap(const dim_type leadingDim,
731  const dim_type trailingDim=0) const;
732 
733 #ifdef HAVE_EPETRA
734 
743  Teuchos::RCP< const Epetra_Map >
744  getEpetraMap(bool withCommPad=true) const;
745 
756  Teuchos::RCP< const Epetra_Map >
757  getEpetraAxisMap(int axis,
758  bool withCommPad=true) const;
759 
760 #endif
761 
762 #ifdef HAVE_TPETRA
763 
773  template< class LocalOrdinal,
774  class GlobalOrdinal = LocalOrdinal,
775  class Node2 = Node >
776  Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
777  getTpetraMap(bool withCommPad=true) const;
778 
791  template< class LocalOrdinal,
792  class GlobalOrdinal = LocalOrdinal,
793  class Node2 = Node >
794  Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
795  getTpetraAxisMap(int axis,
796  bool withCommPad=true) const;
797 
798 #endif
799 
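  // Editorial example: obtain flat (1-D) Epetra maps equivalent to this
  // MDMap, assuming Epetra support is enabled; the Tpetra variants above
  // are analogous but templated on the ordinal types. 'mdMap' as before.
  //
  //   #ifdef HAVE_EPETRA
  //   Teuchos::RCP< const Epetra_Map > mapWithPad = mdMap.getEpetraMap();
  //   Teuchos::RCP< const Epetra_Map > ownedMap   = mdMap.getEpetraMap(false);
  //   #endif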
801 
804 
809  Teuchos::Array< dim_type >
810  getGlobalIndex(size_type globalID) const;
811 
816  Teuchos::Array< dim_type >
817  getLocalIndex(size_type localID) const;
818 
823  size_type getGlobalID(size_type localID) const;
824 
829  size_type
830  getGlobalID(const Teuchos::ArrayView< dim_type > & globalIndex) const;
831 
839  size_type getLocalID(size_type globalID) const;
840 
845  size_type
846  getLocalID(const Teuchos::ArrayView< dim_type > & localIndex) const;
847 
849 
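  // Editorial example: convert between flat IDs and multi-dimensional
  // indexes; 'mdMap' and the ID values are illustrative.
  //
  //   Domi::size_type localID = 0;
  //   Teuchos::Array< Domi::dim_type > localIndex = mdMap.getLocalIndex(localID);
  //   Domi::size_type globalID = mdMap.getGlobalID(localID);
  //   Teuchos::Array< Domi::dim_type > globalIndex = mdMap.getGlobalIndex(globalID);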
852 
867  bool isCompatible(const MDMap< Node > & mdMap) const;
868 
890  bool isSameAs(const MDMap< Node > & mdMap,
891  const int verbose = 0) const;
892 
902  bool isContiguous() const;
903 
905 
906 private:
907 
908  // A private method for computing the bounds and local dimensions,
909  // after the global dimensions, communication and boundary padding
910  // have been properly assigned.
911  void computeBounds();
912 
913  // The underlying multi-dimensional communicator.
914  Teuchos::RCP< const MDComm > _mdComm;
915 
916  // The size of the global dimensions along each axis. This includes
917  // the values of the boundary padding.
918  Teuchos::Array< dim_type > _globalDims;
919 
920  // Store the start and stop indexes of the global problem along each
921  // axis. This includes the values of the boundary padding.
922  Teuchos::Array< Slice > _globalBounds;
923 
924  // A double array of Slices that stores the starting and stopping
925  // indexes for each axis processor along each axis. This data
926  // structure allows any processor to know the map bounds on any
927  // other processor. These bounds do NOT include the boundary or
928  // communication padding. The outer array will have a size equal to
929  // the number of dimensions. Each inner array will have a size
930  // equal to the number of axis processors along that axis.
931  Teuchos::Array< Teuchos::Array< Slice > > _globalRankBounds;
932 
933  // The global stride between adjacent IDs. This quantity is
934  // "virtual" as it does not describe actual memory layout, but it is
935  // useful for index conversion algorithms.
936  Teuchos::Array< size_type > _globalStrides;
937 
 938  // The minimum global ID of the global data structure, including
939  // boundary padding. This will only be non-zero on a sub-map.
940  size_type _globalMin;
941 
942  // The maximum global ID of the global data structure, including
943  // boundary padding.
944  size_type _globalMax;
945 
946  // The size of the local dimensions along each axis. This includes
947  // the values of the padding.
948  Teuchos::Array< dim_type > _localDims;
949 
950  // The local loop bounds along each axis, stored as an array of
951  // Slices. These bounds DO include the padding. By definition, the
 952  // start() attribute of these Slices will all be zero.
953  // This may seem wasteful (we could store an array of dim_type
954  // rather than an array of Slice), but this storage is convenient
955  // when it comes to the getLocalBounds() method.
956  Teuchos::Array< Slice > _localBounds;
957 
958  // The local stride between adjacent elements in memory.
959  Teuchos::Array< size_type > _localStrides;
960 
961  // The minimum local ID of the local data structure, including
962  // padding.
963  size_type _localMin;
964 
965  // The maximum local ID of the local data structure, including
966  // padding.
967  size_type _localMax;
968 
969  // The communication padding that was specified at construction, one
970  // value along each axis.
971  Teuchos::Array< int > _commPadSizes;
972 
973  // The actual padding stored on this processor along each axis. The
 974  // padding can be either communication padding or boundary padding,
 975  // depending on whether this processor lies on the boundary of the domain.
976  Teuchos::Array< padding_type > _pad;
977 
978  // The size of the boundary padding along each axis.
979  Teuchos::Array< int > _bndryPadSizes;
980 
981  // The actual boundary padding stored along each axis. For a full
982  // communicator, the lower and upper boundary padding are both the
983  // same as the corresponding value in _bndryPadSizes. However, the
984  // introduction of sub-maps creates the possibility of different
985  // upper and lower boundary padding values. If an axis is periodic,
986  // then these values will be set to the communication padding.
987  Teuchos::Array< padding_type > _bndryPad;
988 
989  // An array of flags, one for each axis, that specifies whether the
990  // endpoints of a periodic boundary are replicated (1 or true) or
991  // unique (0 or false). These flags only have meaning for axes that
 992  // are periodic. The array type is int, because Array< bool > is
993  // specialized and sometimes difficult to work with.
994  Teuchos::Array< int > _replicatedBoundary;
995 
996  // The storage order
997  Layout _layout;
998 
999  // An array of axis maps that correspond to the full MDMap. An axis
1000  // map describes the map along a single axis. This member is mutable
1001  // because it is logically const but does not get constructed until
1002  // requested by the user.
1003  mutable Teuchos::Array< Teuchos::RCP< const MDMap< Node > > > _axisMaps;
1004 
1005  // Kokkos node
1006  Teuchos::RCP< Node > _node;
1007 
1008 #ifdef HAVE_EPETRA
1009  // An RCP pointing to an Epetra_Map that is equivalent to this
1010  // MDMap, including communication padding. It is mutable because we
 1011  // do not construct it until it is asked for by a get method that is
1012  // const.
1013  mutable Teuchos::RCP< const Epetra_Map > _epetraMap;
1014 
1015  // An RCP pointing to an Epetra_Map that is equivalent to this
1016  // MDMap, excluding communication padding. The returned Epetra_Map
1017  // thus indicates what IDs are owned by this processor. It is
 1018  // mutable because we do not construct it until it is asked for by a
1019  // get method that is const.
1020  mutable Teuchos::RCP< const Epetra_Map > _epetraOwnMap;
1021 
1022  // An ArrayRCP that stores Epetra_Maps that represent the
1023  // distribution of indexes along each axis, including communication
 1024  // padding. It is mutable because we do not construct it until it is
1025  // asked for by a get method that is const.
1026  mutable Teuchos::Array< Teuchos::RCP< const Epetra_Map > > _epetraAxisMaps;
1027 
1028  // An ArrayRCP that stores Epetra_Maps that represent the
1029  // distribution of indexes along each axis, excluding communication
 1030  // padding. It is mutable because we do not construct it until it is
1031  // asked for by a get method that is const.
1032  mutable Teuchos::Array< Teuchos::RCP< const Epetra_Map > > _epetraAxisOwnMaps;
1033 #endif
1034 
1035 };
1036 
1038 // Implementations
1040 
1041 template< class Node >
1042 MDMap< Node >::
1043 MDMap(const Teuchos::RCP< const MDComm > mdComm,
1044  const Teuchos::ArrayView< const dim_type > & dimensions,
1045  const Teuchos::ArrayView< const int > & commPad,
1046  const Teuchos::ArrayView< const int > & bndryPad,
1047  const Teuchos::ArrayView< const int > & replicatedBoundary,
1048  const Layout layout) :
1049  _mdComm(mdComm),
1050  _globalDims(mdComm->numDims()),
1051  _globalBounds(),
1052  _globalRankBounds(mdComm->numDims()),
1053  _globalStrides(),
1054  _globalMin(0),
1055  _globalMax(),
1056  _localDims(mdComm->numDims(), 0),
1057  _localBounds(),
1058  _localStrides(),
1059  _localMin(0),
1060  _localMax(),
1061  _commPadSizes(mdComm->numDims(), 0),
1062  _pad(),
1063  _bndryPadSizes(mdComm->numDims(), 0),
1064  _bndryPad(),
1065  _replicatedBoundary(createArrayOfInts(mdComm->numDims(),
1066  replicatedBoundary)),
1067  _layout(layout)
1068 {
1069  // Temporarily store the number of dimensions
1070  int numDims = mdComm->numDims();
1071 
1072  // Check the global dimensions
1073  TEUCHOS_TEST_FOR_EXCEPTION(
1074  numDims != dimensions.size(),
1076  "Size of dimensions does not match MDComm number of dimensions");
1077 
1078  // Copy the communication and boundary padding sizes, compute the
1079  // global dimensions and bounds, and the actual padding
1080  for (int axis = 0; axis < numDims; ++axis)
1081  {
1082  if (axis < commPad.size() ) _commPadSizes[ axis] = commPad[ axis];
1083  if (axis < bndryPad.size()) _bndryPadSizes[axis] = bndryPad[axis];
1084  if (_mdComm->isPeriodic(axis))
1085  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1086  _commPadSizes[axis]));
1087  else
1088  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1089  _bndryPadSizes[axis]));
1090  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1091  _bndryPad[axis][1];
1092  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1093  int lower, upper;
1094  if (getLowerNeighbor(axis) == -1)
1095  lower = _bndryPadSizes[axis];
1096  else
1097  lower = _commPadSizes[axis];
1098  if (getUpperNeighbor(axis) == -1)
1099  upper = _bndryPadSizes[axis];
1100  else
1101  upper = _commPadSizes[axis];
1102  _pad.push_back(Teuchos::tuple(lower, upper));
1103  }
1104 
1105  // Compute the global size
1106  _globalMax = computeSize(_globalDims());
1107 
1108  // Compute _globalRankBounds, _localBounds, and _localDims. Then
1109  // compute the local size
1110  computeBounds();
1111  _localMax = computeSize(_localDims());
1112 
1113  // Compute the global and local strides
1114  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1115  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1116 }
1117 
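// Editorial note: a worked example of the padding arithmetic above. With
// dimensions = (10), commPad = (1), and bndryPad = (2) on a single
// non-periodic axis split across two processors, the constructor produces
//
//   _bndryPad[0]   = (2, 2)
//   _globalDims[0] = 10 + 2 + 2 = 14
//   _pad[0]        = (2, 1) on the lowest-rank processor along the axis
//                    (boundary pad below, communication pad above), and
//                  = (1, 2) on the highest-rank processor.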
1119 
1120 template< class Node >
1121 MDMap< Node >::
1122 MDMap(Teuchos::ParameterList & plist) :
1123  _mdComm(Teuchos::rcp(new MDComm(plist))),
1124  _globalDims(),
1125  _globalBounds(),
1126  _globalRankBounds(),
1127  _globalStrides(),
1128  _globalMin(0),
1129  _globalMax(),
1130  _localDims(),
1131  _localBounds(),
1132  _localStrides(),
1133  _localMin(0),
1134  _localMax(0),
1135  _commPadSizes(),
1136  _pad(),
1137  _bndryPadSizes(),
1138  _bndryPad(),
1139  _replicatedBoundary(),
1140  _layout()
1141 {
1142  // Note that the call to the MDComm constructor in the constructor
1143  // initialization list will validate the ParameterList, so we don't
1144  // have to do it again here.
1145 
1146  // Temporarily store the number of dimensions
1147  int numDims = _mdComm->numDims();
1148 
1149  // Check the global dimensions
1150  Teuchos::Array< dim_type > dimensions =
1151  plist.get("dimensions", Teuchos::Array< dim_type >());
1152  TEUCHOS_TEST_FOR_EXCEPTION(
1153  numDims != dimensions.size(),
1155  "Size of dimensions does not match MDComm number of dimensions");
1156 
1157  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1158  // ParameterList
1159  int commPad = plist.get("communication pad size", int(0));
1160  int bndryPad = plist.get("boundary pad size" , int(0));
1161  Teuchos::Array< int > commPads =
1162  plist.get("communication pad sizes", Teuchos::Array< int >());
1163  Teuchos::Array< int > bndryPads =
1164  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1165  _commPadSizes.resize( numDims);
1166  _bndryPadSizes.resize(numDims);
1167  _globalDims.resize( numDims);
1168 
1169  // Copy the communication and boundary padding sizes, compute the
1170  // global dimensions and bounds, and the actual padding
1171  for (int axis = 0; axis < numDims; ++axis)
1172  {
1173  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1174  else _commPadSizes[ axis] = commPad;
1175  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1176  else _bndryPadSizes[axis] = bndryPad;
1177  if (_mdComm->isPeriodic(axis))
1178  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1179  _commPadSizes[axis]));
1180  else
1181  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1182  _bndryPadSizes[axis]));
1183  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1184  _bndryPad[axis][1];
1185  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1186  int lower, upper;
1187  if (getLowerNeighbor(axis) == -1)
1188  lower = _bndryPadSizes[axis];
1189  else
1190  lower = _commPadSizes[axis];
1191  if (getUpperNeighbor(axis) == -1)
1192  upper = _bndryPadSizes[axis];
1193  else
1194  upper = _commPadSizes[axis];
1195  _pad.push_back(Teuchos::tuple(lower, upper));
1196  }
1197 
1198  // Compute the global size
1199  _globalMax = computeSize(_globalDims());
1200 
1201  // Compute _globalRankBounds, _localBounds, and _localDims.
1202  // Then compute the local size
1203  _globalRankBounds.resize(numDims);
1204  _localDims.resize(numDims);
1205  computeBounds();
1206  _localMax = computeSize(_localDims());
1207 
1208  // Set the replicated boundary flags along each axis
1209  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1210  Teuchos::Array< int >());
1211  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1212 
1213  // Set the layout
1214  std::string layout = plist.get("layout", "DEFAULT");
1215  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1216  if (layout == "C ORDER")
1217  _layout = C_ORDER;
1218  else if (layout == "FORTRAN ORDER")
1219  _layout = FORTRAN_ORDER;
1220  else if (layout == "ROW MAJOR")
1221  _layout = ROW_MAJOR;
1222  else if (layout == "COLUMN MAJOR")
1223  _layout = COLUMN_MAJOR;
1224  else if (layout == "LAST INDEX FASTEST")
1225  _layout = LAST_INDEX_FASTEST;
1226  else if (layout == "FIRST INDEX FASTEST")
1227  _layout = FIRST_INDEX_FASTEST;
1228  else
1229  _layout = DEFAULT_ORDER;
1230 
1231  // Compute the strides
1232  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1233  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1234 }
1235 
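// Editorial example: a ParameterList that exercises the constructor above.
// Only the MDMap-related entries read in this constructor are shown; the
// entries consumed by the embedded MDComm constructor (processor
// decomposition, periodicity) are omitted. Values are illustrative.
//
//   Teuchos::Array< Domi::dim_type > dims(2);
//   dims[0] = 100;  dims[1] = 80;
//   Teuchos::ParameterList plist;
//   plist.set("dimensions", dims);
//   plist.set("communication pad size", 1);
//   plist.set("boundary pad size", 2);
//   plist.set("layout", "C ORDER");
//   Domi::MDMap<> mdMap(plist);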
1237 
1238 template< class Node >
1239 MDMap< Node >::
1240 MDMap(Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm,
1241  Teuchos::ParameterList & plist) :
1242  _mdComm(Teuchos::rcp(new MDComm(teuchosComm, plist))),
1243  _globalDims(),
1244  _globalBounds(),
1245  _globalRankBounds(),
1246  _globalStrides(),
1247  _globalMin(0),
1248  _globalMax(),
1249  _localDims(),
1250  _localBounds(),
1251  _localStrides(),
1252  _localMin(0),
1253  _localMax(0),
1254  _commPadSizes(),
1255  _pad(),
1256  _bndryPadSizes(),
1257  _bndryPad(),
1258  _replicatedBoundary(),
1259  _layout()
1260 {
1261  // Note that the call to the MDComm constructor in the constructor
1262  // initialization list will validate the ParameterList, so we don't
1263  // have to do it again here.
1264 
1265  // Temporarily store the number of dimensions
1266  int numDims = _mdComm->numDims();
1267 
1268  // Check the global dimensions
1269  Teuchos::Array< dim_type > dimensions =
1270  plist.get("dimensions", Teuchos::Array< dim_type >());
1271  TEUCHOS_TEST_FOR_EXCEPTION(
1272  numDims != dimensions.size(),
1274  "Size of dimensions does not match MDComm number of dimensions");
1275 
1276  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1277  // ParameterList
1278  int commPad = plist.get("communication pad size", int(0));
1279  int bndryPad = plist.get("boundary pad size" , int(0));
1280  Teuchos::Array< int > commPads =
1281  plist.get("communication pad sizes", Teuchos::Array< int >());
1282  Teuchos::Array< int > bndryPads =
1283  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1284  _commPadSizes.resize( numDims);
1285  _bndryPadSizes.resize(numDims);
1286  _globalDims.resize( numDims);
1287 
1288  // Copy the communication and boundary padding sizes, compute the
1289  // global dimensions and bounds, and the actual padding
1290  for (int axis = 0; axis < numDims; ++axis)
1291  {
1292  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1293  else _commPadSizes[ axis] = commPad;
1294  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1295  else _bndryPadSizes[axis] = bndryPad;
1296  if (_mdComm->isPeriodic(axis))
1297  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1298  _commPadSizes[axis]));
1299  else
1300  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1301  _bndryPadSizes[axis]));
1302  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1303  _bndryPad[axis][1];
1304  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1305  int lower, upper;
1306  if (getLowerNeighbor(axis) == -1)
1307  lower = _bndryPadSizes[axis];
1308  else
1309  lower = _commPadSizes[axis];
1310  if (getUpperNeighbor(axis) == -1)
1311  upper = _bndryPadSizes[axis];
1312  else
1313  upper = _commPadSizes[axis];
1314  _pad.push_back(Teuchos::tuple(lower, upper));
1315  }
1321 
1322  // Compute the global size
1323  _globalMax = computeSize(_globalDims());
1324 
1325  // Compute _globalRankBounds, _localBounds, and _localDims.
1326  // Then compute the local size
1327  _globalRankBounds.resize(numDims);
1328  _localDims.resize(numDims);
1329  computeBounds();
1330  _localMax = computeSize(_localDims());
1331 
1332  // Set the replicated boundary flags along each axis
1333  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1334  Teuchos::Array< int >());
1335  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1336 
1337  // Set the layout
1338  std::string layout = plist.get("layout", "DEFAULT");
1339  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1340  if (layout == "C ORDER")
1341  _layout = C_ORDER;
1342  else if (layout == "FORTRAN ORDER")
1343  _layout = FORTRAN_ORDER;
1344  else if (layout == "ROW MAJOR")
1345  _layout = ROW_MAJOR;
1346  else if (layout == "COLUMN MAJOR")
1347  _layout = COLUMN_MAJOR;
1348  else if (layout == "LAST INDEX FASTEST")
1349  _layout = LAST_INDEX_FASTEST;
1350  else if (layout == "FIRST INDEX FASTEST")
1351  _layout = FIRST_INDEX_FASTEST;
1352  else
1353  _layout = DEFAULT_ORDER;
1354 
1355  // Compute the strides
1356  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1357  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1358 }
1359 
1361 
1362 template< class Node >
1363 MDMap< Node >::
1364 MDMap(Teuchos::RCP< const MDComm > mdComm,
1365  Teuchos::ParameterList & plist) :
1366  _mdComm(mdComm),
1367  _globalDims(mdComm->numDims()),
1368  _globalBounds(),
1369  _globalRankBounds(mdComm->numDims()),
1370  _globalStrides(),
1371  _globalMin(0),
1372  _globalMax(),
1373  _localDims(mdComm->numDims(), 0),
1374  _localBounds(),
1375  _localStrides(),
1376  _localMin(0),
1377  _localMax(),
1378  _commPadSizes(mdComm->numDims(), 0),
1379  _pad(),
1380  _bndryPadSizes(mdComm->numDims(), 0),
1381  _bndryPad(),
1382  _replicatedBoundary(),
1383  _layout()
1384 {
1385  // Validate the ParameterList
1386  plist.validateParameters(*getValidParameters());
1387 
1388  // Temporarily store the number of dimensions
1389  int numDims = _mdComm->numDims();
1390 
1391  // Check the global dimensions
1392  Teuchos::Array< dim_type > dimensions =
1393  plist.get("dimensions", Teuchos::Array< dim_type >());
1394  TEUCHOS_TEST_FOR_EXCEPTION(
1395  numDims != dimensions.size(),
1397  "Number of dimensions does not match MDComm number of dimensions");
1398 
1399  // Initialize _bndryPadSizes, _commPadSizes and _globalDims from the
1400  // ParameterList
1401  int commPad = plist.get("communication pad size", int(0));
1402  int bndryPad = plist.get("boundary pad size" , int(0));
1403  Teuchos::Array< int > commPads =
1404  plist.get("communication pad sizes", Teuchos::Array< int >());
1405  Teuchos::Array< int > bndryPads =
1406  plist.get("boundary pad sizes" , Teuchos::Array< int >());
1407  _commPadSizes.resize( numDims);
1408  _bndryPadSizes.resize(numDims);
1409  _globalDims.resize( numDims);
1410 
1411  // Copy the communication and boundary padding sizes, compute the
1412  // global dimensions and bounds, and the actual padding
1413  for (int axis = 0; axis < numDims; ++axis)
1414  {
1415  if (axis < commPads.size() ) _commPadSizes[ axis] = commPads[ axis];
1416  else _commPadSizes[ axis] = commPad;
1417  if (axis < bndryPads.size()) _bndryPadSizes[axis] = bndryPads[axis];
1418  else _bndryPadSizes[axis] = bndryPad;
1419  if (_mdComm->isPeriodic(axis))
1420  _bndryPad.push_back(Teuchos::tuple(_commPadSizes[axis],
1421  _commPadSizes[axis]));
1422  else
1423  _bndryPad.push_back(Teuchos::tuple(_bndryPadSizes[axis],
1424  _bndryPadSizes[axis]));
1425  _globalDims[axis] = dimensions[axis] + _bndryPad[axis][0] +
1426  _bndryPad[axis][1];
1427  _globalBounds.push_back(ConcreteSlice(_globalDims[axis]));
1428  int lower, upper;
1429  if (getLowerNeighbor(axis) == -1)
1430  lower = _bndryPadSizes[axis];
1431  else
1432  lower = _commPadSizes[axis];
1433  if (getUpperNeighbor(axis) == -1)
1434  upper = _bndryPadSizes[axis];
1435  else
1436  upper = _commPadSizes[axis];
1437  _pad.push_back(Teuchos::tuple(lower, upper));
1438  }
1439 
1440  // Compute the global size
1441  _globalMax = computeSize(_globalDims());
1442 
1443  // Compute _globalRankBounds, _localBounds, and _localDims.
1444  // Then compute the local size
1445  _globalRankBounds.resize(numDims);
1446  _localDims.resize(numDims);
1447  computeBounds();
1448  _localMax = computeSize(_localDims());
1449 
1450  // Set the replicated boundary flags along each axis
1451  Teuchos::Array< int > repBndry = plist.get("replicated boundary",
1452  Teuchos::Array< int >());
1453  _replicatedBoundary = createArrayOfInts(numDims, repBndry);
1454 
1455  // Set the layout
1456  std::string layout = plist.get("layout", "DEFAULT");
1457  std::transform(layout.begin(), layout.end(), layout.begin(), ::toupper);
1458  if (layout == "C ORDER")
1459  _layout = C_ORDER;
1460  else if (layout == "FORTRAN ORDER")
1461  _layout = FORTRAN_ORDER;
1462  else if (layout == "ROW MAJOR")
1463  _layout = ROW_MAJOR;
1464  else if (layout == "COLUMN MAJOR")
1465  _layout = COLUMN_MAJOR;
1466  else if (layout == "LAST INDEX FASTEST")
1467  _layout = LAST_INDEX_FASTEST;
1468  else if (layout == "FIRST INDEX FASTEST")
1469  _layout = FIRST_INDEX_FASTEST;
1470  else
1471  _layout = DEFAULT_ORDER;
1472 
1473  // Compute the strides
1474  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1475  _localStrides = computeStrides< size_type, dim_type >(_localDims , _layout);
1476 }
1477 
1479 
1480 template< class Node >
1481 MDMap< Node >::
1482 MDMap(const Teuchos::RCP< const MDComm > mdComm,
1483  const Teuchos::ArrayView< Slice > & myGlobalBounds,
1484  const Teuchos::ArrayView< padding_type > & padding,
1485  const Teuchos::ArrayView< const int > & replicatedBoundary,
1486  const Layout layout) :
1487  _mdComm(mdComm),
1488  _globalDims(mdComm->numDims()),
1489  _globalBounds(mdComm->numDims()),
1490  _globalRankBounds(mdComm->numDims()),
1491  _globalStrides(mdComm->numDims()),
1492  _globalMin(0),
1493  _globalMax(0),
1494  _localDims(mdComm->numDims(), 0),
1495  _localBounds(mdComm->numDims()),
1496  _localStrides(mdComm->numDims()),
1497  _localMin(0),
1498  _localMax(0),
1499  _commPadSizes(mdComm->numDims(), 0),
1500  _pad(mdComm->numDims(), Teuchos::tuple(0,0)),
1501  _bndryPadSizes(mdComm->numDims(), 0),
1502  _bndryPad(mdComm->numDims()),
1503  _replicatedBoundary(createArrayOfInts(mdComm->numDims(),
1504  replicatedBoundary)),
1505  _layout(layout)
1506 {
1507  // Check that myGlobalBounds is the correct size
1508  int numDims = _mdComm->numDims();
1509  TEUCHOS_TEST_FOR_EXCEPTION(
1510  myGlobalBounds.size() < numDims,
1512  "MDMap: myGlobalBounds too small");
1513 
1514  // Copy the padding to _pad
1515  int maxAxis = std::min(numDims, (int)padding.size());
1516  for (int axis = 0; axis < maxAxis; ++axis)
1517  _pad[axis] = padding[axis];
1518 
1519  // All of the required info for the MDMap is contained in the
1520  // myGlobalBounds and padding arguments, but it is distributed. We
1521  // will do a gather so that each processor has the global bounds
1522  // data from each other processor.
1523 
1524  // Resize _globalRankBounds
1525  for (int axis = 0; axis < numDims; ++axis)
1526  _globalRankBounds[axis].resize(_mdComm->getCommDim(axis));
1527 
1528  // Allocate and initialize the communication buffers
1529  int numProc = _mdComm->getTeuchosComm()->getSize();
1530  MDArray< dim_type > sendBuffer(Teuchos::tuple(5,numDims),
1531  FIRST_INDEX_FASTEST);
1532  MDArray< dim_type > recvBuffer(Teuchos::tuple(5,numDims,numProc),
1533  FIRST_INDEX_FASTEST);
1534  for (int axis = 0; axis < numDims; ++axis)
1535  {
1536  sendBuffer(0,axis) = mdComm->getCommIndex(axis);
1537  sendBuffer(1,axis) = myGlobalBounds[axis].start();
1538  sendBuffer(2,axis) = myGlobalBounds[axis].stop();
1539  sendBuffer(3,axis) = _pad[axis][0];
1540  sendBuffer(4,axis) = _pad[axis][1];
1541  }
1542 
1543  // Perform the gather all
1544  Teuchos::gatherAll(*(_mdComm->getTeuchosComm()),
1545  (int)sendBuffer.size(),
1546  sendBuffer.getRawPtr(),
1547  (int)recvBuffer.size(),
1548  recvBuffer.getRawPtr());
1549 
1550  // Extract _globalRankBounds and _bndryPad. Because of the
1551  // structure, there will be duplicate Slices and padding, and we
1552  // will check to make sure they are the expected equivalent values.
1553  for (int axis = 0; axis < numDims; ++axis)
1554  {
1555  for (int commIndex = 0; commIndex < _mdComm->getCommDim(axis); ++commIndex)
1556  {
1557  Slice bounds;
1558  padding_type pad;
1559  for (int rank = 0; rank < numProc; ++rank)
1560  {
1561  if (recvBuffer(0,axis,rank) == commIndex)
1562  {
1563  dim_type start = recvBuffer(1,axis,rank);
1564  dim_type stop = recvBuffer(2,axis,rank);
1565  int loPad = recvBuffer(3,axis,rank);
1566  int hiPad = recvBuffer(4,axis,rank);
1567  if (bounds.stop() == Slice::Default)
1568  {
1569  bounds = Slice(start, stop);
1570  pad[0] = loPad;
1571  pad[1] = hiPad;
1572  }
1573  else
1574  {
1575  TEUCHOS_TEST_FOR_EXCEPTION(
1576  (bounds.start() != start) || (bounds.stop() != stop),
1577  BoundsError,
1578  "Global rank bounds mismatch: bounds = " << bounds <<
1579  ", (start,stop) = (" << start << "," << stop << ")");
1580  TEUCHOS_TEST_FOR_EXCEPTION(
1581  (pad[0] != loPad) || (pad[1] != hiPad),
1582  BoundsError,
1583  "Padding value mismatch: pad = " << pad << ", (loPad,hiPad) = ("
1584  << loPad << "," << hiPad << ")");
1585  }
1586  }
1587  }
1588 
1589  // Extract the _bndryPad data
1590  if (commIndex == 0 ) _bndryPad[axis][0] = pad[0];
1591  if (commIndex == mdComm->getCommDim(axis)-1) _bndryPad[axis][1] = pad[1];
1592 
1593  // Extract the verified _globalRankBounds
1594  _globalRankBounds[axis][commIndex] = bounds;
1595  }
1596  }
1597 
1598  // Check the sanity of _globalRankBounds
1599  for (int axis = 0; axis < numDims; ++axis)
1600  {
1601  for (int commIndex = 1; commIndex < _mdComm->getCommDim(axis); ++commIndex)
1602  {
1603  TEUCHOS_TEST_FOR_EXCEPTION(
1604  _globalRankBounds[axis][commIndex-1].stop() !=
1605  _globalRankBounds[axis][commIndex ].start(),
1607  "Global rank bounds are not contiguous");
1608  }
1609  }
1610 
1611  // Set the global data
1612  for (int axis = 0; axis < numDims; ++axis)
1613  {
1614  int commSize = _mdComm->getCommDim(axis);
1615  dim_type start =
1616  _globalRankBounds[axis][0 ].start() - _pad[axis][0];
1617  dim_type stop =
1618  _globalRankBounds[axis][commSize-1].stop() + _pad[axis][1];
1619  _globalDims[axis] = stop - start;
1620  _globalBounds[axis] = Slice(start, stop);
1621  }
1622  _globalStrides = computeStrides< size_type, dim_type >(_globalDims, _layout);
1623 
1624  // Set the global min and max
1625  for (int axis = 0; axis < numDims; ++axis)
1626  {
1627  _globalMin += _globalBounds[axis].start() * _globalStrides[axis];
1628  _globalMax += _globalBounds[axis].stop() * _globalStrides[axis];
1629  }
1630 
1631  // Set the local data
1632  for (int axis = 0; axis < numDims; ++axis)
1633  {
1634  int commIndex = _mdComm->getCommIndex(axis);
1635  dim_type start =
1636  _globalRankBounds[axis][commIndex].start() - _pad[axis][0];
1637  dim_type stop =
1638  _globalRankBounds[axis][commIndex].stop() + _pad[axis][1];
1639  _localDims[axis] = stop - start;
1640  _localBounds[axis] = Slice(stop - start);
1641  }
1642  _localStrides = computeStrides< size_type, dim_type >(_localDims, _layout);
1643 
1644  // Compute the local max
1645  for (int axis = 0; axis < numDims; ++axis)
1646  _localMax += (_localDims[axis] - 1) * _localStrides[axis];
1647 }
1648 
1650 
1651 template< class Node >
1652 MDMap< Node >::
1653 MDMap(const MDMap< Node > & source) :
1654  _mdComm(source._mdComm),
1655  _globalDims(source._globalDims),
1656  _globalBounds(source._globalBounds),
1657  _globalRankBounds(source._globalRankBounds),
1658  _globalStrides(source._globalStrides),
1659  _globalMin(source._globalMin),
1660  _globalMax(source._globalMax),
1661  _localDims(source._localDims),
1662  _localBounds(source._localBounds),
1663  _localStrides(source._localStrides),
1664  _localMin(source._localMin),
1665  _localMax(source._localMax),
1666  _commPadSizes(source._commPadSizes),
1667  _pad(source._pad),
1668  _bndryPadSizes(source._bndryPadSizes),
1669  _bndryPad(source._bndryPad),
1670  _replicatedBoundary(source._replicatedBoundary),
1671  _layout(source._layout)
1672 {
1673 }
1674 
1676 
1677 template< class Node >
1678 MDMap< Node >::
1679 MDMap(const MDMap< Node > & parent,
1680  int axis,
1681  dim_type index) :
1682  _mdComm(parent._mdComm),
1683  _globalDims(),
1684  _globalBounds(),
1685  _globalRankBounds(),
1686  _globalStrides(),
1687  _globalMin(),
1688  _globalMax(),
1689  _localDims(),
1690  _localBounds(),
1691  _localStrides(),
1692  _localMin(),
1693  _localMax(),
1694  _commPadSizes(),
1695  _pad(),
1696  _bndryPadSizes(),
1697  _bndryPad(),
1698  _replicatedBoundary(),
1699  _layout(parent._layout)
1700 {
1701  if (parent.onSubcommunicator())
1702  {
1703  int numDims = parent.numDims();
1704  TEUCHOS_TEST_FOR_EXCEPTION(
1705  ((axis < 0) || (axis >= numDims)),
1706  RangeError,
1707  "axis = " << axis << " is invalid for communicator with " <<
1708  numDims << " dimensions");
1709 
1710  Slice globalBounds = parent.getGlobalBounds(axis,true);
1711  TEUCHOS_TEST_FOR_EXCEPTION(
1712  ((index < globalBounds.start()) || (index >= globalBounds.stop())),
1713  RangeError,
1714  "index = " << index << " is invalid for MDMap axis " <<
1715  axis << " with bounds " << globalBounds);
1716 
1717  // Determine the axis rank for the processor on which the index
1718  // lives, and construct the MDComm
1719  int thisAxisRank = -1;
1720  for (int axisRank = 0; axisRank < parent.getCommDim(axis); ++axisRank)
1721  if (index >= parent._globalRankBounds[axis][axisRank].start() &&
1722  index < parent._globalRankBounds[axis][axisRank].stop())
1723  thisAxisRank = axisRank;
1724  TEUCHOS_TEST_FOR_EXCEPTION(
1725  (thisAxisRank == -1),
1727  "error computing axis rank for sub-communicator");
1728  _mdComm = Teuchos::rcp(new MDComm(*(parent._mdComm), axis, thisAxisRank));
1729  }
1730 
1731  // There are now two ways for this processor to be off the
1732  // sub-communicator: (1) it came in that way, or (2) it is not on
1733  // the new sub-communicator. Either way, this will be reflected in
1734  // the new _mdComm, so we check it now.
1735  if (_mdComm->onSubcommunicator())
1736  {
1737  int numDims = parent.numDims();
1738  if (numDims == 1)
1739  {
1740  _globalDims.push_back(1);
1741  _globalBounds.push_back(ConcreteSlice(index,index+1));
1742  Teuchos::Array< Slice > bounds(1);
1743  bounds[0] = ConcreteSlice(index, index+1);
1744  _globalRankBounds.push_back(bounds);
1745  _globalStrides.push_back(1);
1746  _globalMin = index * parent._globalStrides[axis];
1747  _globalMax = _globalMin;
1748  _localDims.push_back(1);
1749  _localBounds.push_back(ConcreteSlice(0,1));
1750  _localStrides.push_back(1);
1751  _localMin = parent._localMin +
1752  (index - parent._globalRankBounds[axis][0].start()) *
1753  parent._localStrides[axis];
1754  _localMax = _localMin + 1;
1755  _commPadSizes.push_back(0);
1756  _pad.push_back(Teuchos::tuple(0,0));
1757  _bndryPadSizes.push_back(0);
1758  _bndryPad.push_back(Teuchos::tuple(0,0));
1759  _replicatedBoundary.push_back(0);
1760  }
1761  else
1762  {
1763  _globalMin = parent._globalMin;
1764  _globalMax = parent._globalMax;
1765  _localMin = parent._localMin;
1766  _localMax = parent._localMax;
1767  for (int myAxis = 0; myAxis < numDims; ++myAxis)
1768  {
1769  if (myAxis != axis)
1770  {
1771  _globalDims.push_back(parent._globalDims[myAxis]);
1772  _globalBounds.push_back(parent._globalBounds[myAxis]);
1773  _globalRankBounds.push_back(parent._globalRankBounds[myAxis]);
1774  _globalStrides.push_back(parent._globalStrides[myAxis]);
1775  _localDims.push_back(parent._localDims[myAxis]);
1776  _localBounds.push_back(parent._localBounds[myAxis]);
1777  _localStrides.push_back(parent._localStrides[myAxis]);
1778  _commPadSizes.push_back(parent._commPadSizes[myAxis]);
1779  _pad.push_back(parent._pad[myAxis]);
1780  _bndryPadSizes.push_back(parent._bndryPadSizes[myAxis]);
1781  _bndryPad.push_back(parent._bndryPad[myAxis]);
1782  _replicatedBoundary.push_back(parent._replicatedBoundary[myAxis]);
1783  }
1784  else
1785  {
1786  int axisRank = parent.getCommIndex(axis);
1787  _globalMin += index * parent._globalStrides[axis];
1788  _globalMax -= (parent._globalBounds[axis].stop() - index) *
1789  parent._globalStrides[axis];
1790  _localMin += (index-parent._globalRankBounds[axis][axisRank].start())
1791  * parent._localStrides[axis];
1792  _localMax -= (parent._globalRankBounds[axis][axisRank].stop()-index-1)
1793  * parent._localStrides[axis];
1794  }
1795  }
1796  }
1797  }
1798 }
1799 
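// Editorial example: the constructor above extracts a lower-dimensional
// sub-map. For a 3-D parent map, the call below selects global index 5
// along axis 2; the result has two dimensions, and processors whose slab
// does not contain that index end up off the new sub-communicator.
// 'parentMap' and the values are illustrative.
//
//   Domi::MDMap<> subMap(parentMap, 2, 5);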
1801 
1802 template< class Node >
1803 MDMap< Node >::
1804 MDMap(const MDMap< Node > & parent,
1805  int axis,
1806  const Slice & slice,
1807  int bndryPad) :
1808  _mdComm(parent._mdComm),
1809  _globalDims(parent._globalDims),
1810  _globalBounds(parent._globalBounds),
1811  _globalRankBounds(parent._globalRankBounds),
1812  _globalStrides(parent._globalStrides),
1813  _globalMin(parent._globalMin),
1814  _globalMax(parent._globalMax),
1815  _localDims(parent._localDims),
1816  _localBounds(parent._localBounds),
1817  _localStrides(parent._localStrides),
1818  _localMin(parent._localMin),
1819  _localMax(parent._localMax),
1820  _commPadSizes(parent._commPadSizes),
1821  _pad(parent._pad),
1822  _bndryPadSizes(parent._bndryPadSizes),
1823  _bndryPad(parent._bndryPad),
1824  _replicatedBoundary(parent._replicatedBoundary),
1825  _layout(parent._layout)
1826 {
1827  if (parent.onSubcommunicator())
1828  {
1829  // Temporarily store the number of dimensions
1830  int numDims = parent.numDims();
1831 
1832  // Sanity check
1833  TEUCHOS_TEST_FOR_EXCEPTION(
1834  ((axis < 0) || (axis >= numDims)),
1835  RangeError,
1836  "axis = " << axis << " is invalid for MDMap with " <<
1837  numDims << " dimensions");
1838 
1839  // Convert the slice to concrete and check
1840  Slice bounds =
1841  slice.bounds(parent.getGlobalBounds(axis,true).stop());
1842  TEUCHOS_TEST_FOR_EXCEPTION(
1843  ((bounds.start() < parent.getGlobalBounds(axis).start()) ||
1844  (bounds.stop() > parent.getGlobalBounds(axis).stop())),
1845  RangeError,
1846  "Slice along axis " << axis << " is " << bounds << " but must be within "
1847  << parent.getGlobalBounds(axis));
1848  TEUCHOS_TEST_FOR_EXCEPTION(
1849  (bounds.stop() == bounds.start()),
1850  RangeError,
1851  "Slice along axis " << axis << " has length zero");
1852 
1853  // Copy the boundary padding sizes and set initial values for
1854  // _bndryPad
1855  _bndryPadSizes[axis] = bndryPad;
1856  _bndryPad[axis][0] = bndryPad;
1857  _bndryPad[axis][1] = bndryPad;
1858 
1859  // Adjust _globalRankBounds
1860  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1861  ++axisRank)
1862  {
1863  dim_type start = _globalRankBounds[axis][axisRank].start();
1864  dim_type stop = _globalRankBounds[axis][axisRank].stop();
1865  if (start < bounds.start()) start = bounds.start();
1866  if (stop > bounds.stop() ) stop = bounds.stop();
1867  _globalRankBounds[axis][axisRank] = ConcreteSlice(start, stop);
1868  }
1869 
1870  // Alter _bndryPad if necessary
1871  dim_type start = bounds.start() - _bndryPadSizes[axis];
1872  if (start < 0)
1873  {
1874  _bndryPad[axis][0] = bounds.start();
1875  start = 0;
1876  }
1877  dim_type stop = bounds.stop() + _bndryPadSizes[axis];
1878  if (stop > parent.getGlobalBounds(axis,true).stop())
1879  {
1880  _bndryPad[axis][1] = parent.getGlobalBounds(axis,true).stop() -
1881  bounds.stop();
1882  stop = parent.getGlobalBounds(axis,true).stop();
1883  }
1884 
1885  // Compute _globalBounds, _globalDims, _globalMax, and _globalMin
1886  _globalBounds[axis] = ConcreteSlice(start,stop);
1887  _globalDims[axis] = stop - start;
1888  _globalMin += start * _globalStrides[axis];
1889  _globalMax -= (parent.getGlobalDim(axis,true) - stop) *
1890  _globalStrides[axis];
1891 
1892  // Alter _replicatedBoundary
1893  if ((parent.getGlobalBounds(axis,true).start() < _globalBounds[axis].start())
1894  || (parent.getGlobalBounds(axis,true).stop() > _globalBounds[axis].stop()))
1895  _replicatedBoundary[axis] = 0;
1896 
1897  // Build the slice for the MDComm sub-communicator constructor
1898  int pStart = -1;
1899  int pStop = -1;
1900  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1901  ++axisRank)
1902  {
1903  if ((_globalRankBounds[axis][axisRank].start() - _bndryPad[axis][0]
1904  <= _globalBounds[axis].start()) &&
1905  (_globalBounds[axis].start() <
1906  _globalRankBounds[axis][axisRank].stop() + _bndryPad[axis][1]))
1907  if (pStart == -1) pStart = axisRank;
1908  if ((_globalRankBounds[axis][axisRank].start() - _bndryPad[axis][0]
1909  < _globalBounds[axis].stop()) &&
1910  (_globalBounds[axis].stop() <=
1911  _globalRankBounds[axis][axisRank].stop() + _bndryPad[axis][1]))
1912  pStop = axisRank+1;
1913  }
1914  TEUCHOS_TEST_FOR_EXCEPTION(
1915  (pStart == -1 || pStop == -1),
1917  "error computing axis rank slice");
1918  Slice axisRankSlice = ConcreteSlice(pStart,pStop);
1919 
1920  // Construct the MDComm sub-communicator
1921  _mdComm = Teuchos::rcp(new MDComm(*(parent._mdComm), axis, axisRankSlice));
1922 
1923  // We now have a sub-communicator, and should only construct this
1924  // MDMap if this processor is on it. If this processor is off the
1925  // communicator, then we clear many of the data members.
1926  if (_mdComm->onSubcommunicator())
1927  {
1928  // Fix _pad, if needed
1929  int parentAxisRank = parent.getCommIndex(axis);
1930  int myAxisRank = _mdComm->getCommIndex(axis);
1931  if (myAxisRank == 0)
1932  _pad[axis][0] = _bndryPad[axis][0];
1933  if (myAxisRank == _mdComm->getCommDim(axis)-1)
1934  _pad[axis][1] = _bndryPad[axis][1];
1935 
1936  // Compute the local start and stop indexes. Note that
1937  // _globalRankBounds has an axis-rank dimension, and that it
1938  // still uses the parent's commDim, not the new ones. We will
1939  // fix this later.
1940  dim_type start = (_globalRankBounds[axis][parentAxisRank].start() -
1941  _pad[axis][0]) -
1942  (parent._globalRankBounds[axis][parentAxisRank].start() -
1943  parent._pad[axis][0]);
1944  dim_type stop = (_globalRankBounds[axis][parentAxisRank].stop() +
1945  _pad[axis][1]) -
1946  (parent._globalRankBounds[axis][parentAxisRank].start() -
1947  parent._pad[axis][0]);
1948 
1949  // Compute the local bounds, dims, min, and max
1950  _localBounds[axis] = ConcreteSlice(stop - start);
1951  _localDims[axis] = stop - start;
1952  _localMin += start * _localStrides[axis];
1953  _localMax -= (parent.getLocalBounds(axis,true).stop() - stop) *
1954  _localStrides[axis];
1955 
1956  // The new sub-communicator may have fewer processors than the
1957  // parent communicator, so we need to fix _globalRankBounds
1958  Teuchos::Array< Slice > newRankBounds;
1959  for (int axisRank = 0; axisRank < parent.getCommDim(axis);
1960  ++axisRank)
1961  if ((axisRank >= axisRankSlice.start()) &&
1962  (axisRank < axisRankSlice.stop() ) )
1963  newRankBounds.push_back(_globalRankBounds[axis][axisRank]);
1964  _globalRankBounds[axis] = newRankBounds;
1965  }
1966  else
1967  {
1968  _localDims.clear();
1969  _localMin = 0;
1970  _localMax = 0;
1971  _localBounds.clear();
1972  _commPadSizes.clear();
1973  _pad.clear();
1974  _bndryPadSizes.clear();
1975  _bndryPad.clear();
1976  _localStrides.clear();
1977  }
1978  }
1979 }
1980 
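// Editorial example: the constructor above restricts the parent map along
// one axis. The call below keeps global indexes [10,50) along axis 0 and
// asks for one layer of boundary padding around the new sub-domain, taken
// from the parent where available. 'parentMap' and the values are
// illustrative.
//
//   Domi::Slice slice(10, 50);
//   Domi::MDMap<> subMap(parentMap, 0, slice, 1);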
1982 
1983 template< class Node >
1984 MDMap< Node >::
1985 MDMap(const MDMap< Node > & parent,
1986  const Teuchos::ArrayView< Slice > & slices,
1987  const Teuchos::ArrayView< int > & bndryPad)
1988 {
1989  // Temporarily store the number of dimensions
1990  int numDims = parent.numDims();
1991 
1992  // Sanity check on dimensions
1993  TEUCHOS_TEST_FOR_EXCEPTION(
1994  (slices.size() != numDims),
1996  "number of slices = " << slices.size() << " != parent MDMap number of "
1997  "dimensions = " << numDims);
1998 
1999  // Apply the single-Slice constructor to each axis in succession
2000  MDMap< Node > tempMDMap1(parent);
2001  for (int axis = 0; axis < numDims; ++axis)
2002  {
2003  int bndryPadding = (axis < bndryPad.size()) ? bndryPad[axis] : 0;
2004  MDMap< Node > tempMDMap2(tempMDMap1,
2005  axis,
2006  slices[axis],
2007  bndryPadding);
2008  tempMDMap1 = tempMDMap2;
2009  }
2010  *this = tempMDMap1;
2011 }
2012 
2014 
2015 template< class Node >
2016 MDMap< Node >::~MDMap()
2017 {
2018 }
2019 
2021 
2022 template< class Node >
2023 MDMap< Node > &
2024 MDMap< Node >::operator=(const MDMap< Node > & source)
2025 {
2026  _mdComm = source._mdComm;
2027  _globalDims = source._globalDims;
2028  _globalBounds = source._globalBounds;
2029  _globalRankBounds = source._globalRankBounds;
2030  _globalStrides = source._globalStrides;
2031  _globalMin = source._globalMin;
2032  _globalMax = source._globalMax;
2033  _localDims = source._localDims;
2034  _localBounds = source._localBounds;
2035  _localStrides = source._localStrides;
2036  _localMin = source._localMin;
2037  _localMax = source._localMax;
2038  _commPadSizes = source._commPadSizes;
2039  _pad = source._pad;
2040  _bndryPadSizes = source._bndryPadSizes;
2041  _bndryPad = source._bndryPad;
 _replicatedBoundary = source._replicatedBoundary; // editorial addition: copy the flags, as the copy constructor does
 2042  _layout = source._layout;
2043  return *this;
2044 }
2045 
2047 
2048 template< class Node >
2049 Teuchos::RCP< const MDComm >
2050 MDMap< Node >::getMDComm() const
2051 {
2052  return _mdComm;
2053 }
2054 
2056 
2057 template< class Node >
2058 bool
2059 MDMap< Node >::onSubcommunicator() const
2060 {
2061  return _mdComm->onSubcommunicator();
2062 }
2063 
2065 
2066 template< class Node >
2067 Teuchos::RCP< const Teuchos::Comm< int > >
2068 MDMap< Node >::getTeuchosComm() const
2069 {
2070  return _mdComm->getTeuchosComm();
2071 }
2072 
2074 
2075 template< class Node >
2076 int
2077 MDMap< Node >::numDims() const
2078 {
2079  return _mdComm->numDims();
2080 }
2081 
2083 
2084 template< class Node >
2085 Teuchos::Array< int >
2086 MDMap< Node >::getCommDims() const
2087 {
2088  return _mdComm->getCommDims();
2089 }
2090 
2092 
2093 template< class Node >
2094 int
2095 MDMap< Node >::getCommDim(int axis) const
2096 {
2097  return _mdComm->getCommDim(axis);
2098 }
2099 
2101 
2102 template< class Node >
2103 bool
2104 MDMap< Node >::isPeriodic(int axis) const
2105 {
2106  return _mdComm->isPeriodic(axis);
2107 }
2108 
2110 
2111 template< class Node >
2112 int
2113 MDMap< Node >::getCommIndex(int axis) const
2114 {
2115  return _mdComm->getCommIndex(axis);
2116 }
2117 
2119 
2120 template< class Node >
2121 int
2122 MDMap< Node >::getLowerNeighbor(int axis) const
2123 {
2124  return _mdComm->getLowerNeighbor(axis);
2125 }
2126 
2128 
2129 template< class Node >
2130 int
2131 MDMap< Node >::getUpperNeighbor(int axis) const
2132 {
2133  return _mdComm->getUpperNeighbor(axis);
2134 }
2135 
2137 
2138 template< class Node >
2139 Teuchos::Array< dim_type >
2140 MDMap< Node >::
2141 getGlobalDims() const
2142 {
2143  return _globalDims;
2144 }
2145 
2147 
2148 template< class Node >
2149 dim_type
2150 MDMap< Node >::
2151 getGlobalDim(int axis,
2152  bool withBndryPad) const
2153 {
2154 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2155  TEUCHOS_TEST_FOR_EXCEPTION(
2156  ((axis < 0) || (axis >= numDims())),
2157  RangeError,
2158  "invalid axis index = " << axis << " (number of dimensions = " <<
2159  numDims() << ")");
2160 #endif
2161  if (withBndryPad)
2162  return _globalDims[axis];
2163  else
2164  return _globalDims[axis] - _bndryPad[axis][0] - _bndryPad[axis][1];
2165 }
2166 
2168 
2169 template< class Node >
2170 Slice
2171 MDMap< Node >::
2172 getGlobalBounds(int axis,
2173  bool withBndryPad) const
2174 {
2175 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2176  TEUCHOS_TEST_FOR_EXCEPTION(
2177  ((axis < 0) || (axis >= numDims())),
2178  RangeError,
2179  "invalid axis index = " << axis << " (number of dimensions = " <<
2180  numDims() << ")");
2181 #endif
2182  if (withBndryPad)
2183  return _globalBounds[axis];
2184  else
2185  {
2186  dim_type start = _globalBounds[axis].start() + _bndryPad[axis][0];
2187  dim_type stop = _globalBounds[axis].stop() - _bndryPad[axis][1];
2188  return ConcreteSlice(start, stop);
2189  }
2190 }
2191 
2193 
2194 template< class Node >
2195 dim_type
2196 MDMap< Node >::
2197 getLocalDim(int axis,
2198  bool withPad) const
2199 {
2200 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2201  TEUCHOS_TEST_FOR_EXCEPTION(
2202  ((axis < 0) || (axis >= numDims())),
2203  RangeError,
2204  "invalid axis index = " << axis << " (number of dimensions = " <<
2205  numDims() << ")");
2206 #endif
2207  if (withPad)
2208  return _localDims[axis];
2209  else
2210  return _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2211 }
2212 
2214 
2215 template< class Node >
2216 Teuchos::Array< dim_type >
2217 MDMap< Node >::
2218 getLocalDims() const
2219 {
2220  return _localDims;
2221 }
2222 
2224 
2225 template< class Node >
2226 Slice
2227 MDMap< Node >::
2228 getGlobalRankBounds(int axis,
2229  bool withBndryPad) const
2230 {
2231 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2232  TEUCHOS_TEST_FOR_EXCEPTION(
2233  ((axis < 0) || (axis >= numDims())),
2234  RangeError,
2235  "invalid axis index = " << axis << " (number of dimensions = " <<
2236  numDims() << ")");
2237 #endif
2238  int axisRank = getCommIndex(axis);
2239  if (withBndryPad)
2240  {
2241  dim_type start = _globalRankBounds[axis][axisRank].start();
2242  dim_type stop = _globalRankBounds[axis][axisRank].stop();
2243  if (getCommIndex(axis) == 0)
2244  start -= _bndryPad[axis][0];
2245  if (getCommIndex(axis) == getCommDim(axis)-1)
2246  stop += _bndryPad[axis][1];
2247  return ConcreteSlice(start,stop);
2248  }
2249  else
2250  return _globalRankBounds[axis][axisRank];
2251 }
2252 
2254 
2255 template< class Node >
2256 Teuchos::ArrayView< const Slice >
2257 MDMap< Node >::
2258 getLocalBounds() const
2259 {
2260  return _localBounds();
2261 }
2262 
2264 
2265 template< class Node >
2266 Slice
2267 MDMap< Node >::
2268 getLocalBounds(int axis,
2269  bool withPad) const
2270 {
2271 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2272  TEUCHOS_TEST_FOR_EXCEPTION(
2273  ((axis < 0) || (axis >= numDims())),
2274  RangeError,
2275  "invalid axis index = " << axis << " (number of dimensions = " <<
2276  numDims() << ")");
2277 #endif
2278  if (withPad)
2279  return _localBounds[axis];
2280  else
2281  {
2282  dim_type start = _localBounds[axis].start() + _pad[axis][0];
2283  dim_type stop = _localBounds[axis].stop() - _pad[axis][1];
2284  return ConcreteSlice(start, stop);
2285  }
2286 }
2287 
2289 
2290 template< class Node >
2291 Slice
2292 MDMap< Node >::
2293 getLocalInteriorBounds(int axis) const
2294 {
2295 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2296  TEUCHOS_TEST_FOR_EXCEPTION(
2297  ((axis < 0) || (axis >= numDims())),
2298  RangeError,
2299  "invalid axis index = " << axis << " (number of dimensions = " <<
2300  numDims() << ")");
2301 #endif
2302  dim_type start = _localBounds[axis].start() + _pad[axis][0];
2303  dim_type stop = _localBounds[axis].stop() - _pad[axis][1];
2304  if (_mdComm->getLowerNeighbor(axis) == -1) ++start;
2305  if (_mdComm->getUpperNeighbor(axis) == -1) --stop;
2306  return ConcreteSlice(start, stop);
2307 }
2308 
2310 
2311 template< class Node >
2312 bool
2313 MDMap< Node >::hasPadding() const
2314 {
2315  bool result = false;
2316  for (int axis = 0; axis < numDims(); ++axis)
2317  if (_pad[axis][0] + _pad[axis][1]) result = true;
2318  return result;
2319 }
2320 
2322 
2323 template< class Node >
2324 int
2325 MDMap< Node >::getLowerPadSize(int axis) const
2326 {
2327 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2328  TEUCHOS_TEST_FOR_EXCEPTION(
2329  ((axis < 0) || (axis >= numDims())),
2330  RangeError,
2331  "invalid axis index = " << axis << " (number of dimensions = " <<
2332  numDims() << ")");
2333 #endif
2334  return _pad[axis][0];
2335 }
2336 
2338 
2339 template< class Node >
2340 int
2341 MDMap< Node >::getUpperPadSize(int axis) const
2342 {
2343 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2344  TEUCHOS_TEST_FOR_EXCEPTION(
2345  ((axis < 0) || (axis >= numDims())),
2346  RangeError,
2347  "invalid axis index = " << axis << " (number of dimensions = " <<
2348  numDims() << ")");
2349 #endif
2350  return _pad[axis][1];
2351 }
2352 
2354 
2355 template< class Node >
2356 int
2357 MDMap< Node >::getCommPadSize(int axis) const
2358 {
2359 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2360  TEUCHOS_TEST_FOR_EXCEPTION(
2361  ((axis < 0) || (axis >= numDims())),
2362  RangeError,
2363  "invalid axis index = " << axis << " (number of dimensions = " <<
2364  numDims() << ")");
2365 #endif
2366  return _commPadSizes[axis];
2367 }
2368 
2370 
2371 template< class Node >
2372 int
2373 MDMap< Node >::getLowerBndryPad(int axis) const
2374 {
2375 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2376  TEUCHOS_TEST_FOR_EXCEPTION(
2377  ((axis < 0) || (axis >= numDims())),
2378  RangeError,
2379  "invalid axis index = " << axis << " (number of dimensions = " <<
2380  numDims() << ")");
2381 #endif
2382  return _bndryPad[axis][0];
2383 }
2384 
2386 
2387 template< class Node >
2388 int
2389 MDMap< Node >::getUpperBndryPad(int axis) const
2390 {
2391 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2392  TEUCHOS_TEST_FOR_EXCEPTION(
2393  ((axis < 0) || (axis >= numDims())),
2394  RangeError,
2395  "invalid axis index = " << axis << " (number of dimensions = " <<
2396  numDims() << ")");
2397 #endif
2398  return _bndryPad[axis][1];
2399 }
2400 
2402 
2403 template< class Node >
2404 Teuchos::Array< int >
2405 MDMap< Node >::getBndryPadSizes() const
2406 {
2407  return _bndryPadSizes;
2408 }
2409 
2411 
2412 template< class Node >
2413 int
2414 MDMap< Node >::getBndryPadSize(int axis) const
2415 {
2416 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2417  TEUCHOS_TEST_FOR_EXCEPTION(
2418  ((axis < 0) || (axis >= numDims())),
2419  RangeError,
2420  "invalid axis index = " << axis << " (number of dimensions = " <<
2421  numDims() << ")");
2422 #endif
2423  return _bndryPadSizes[axis];
2424 }
2425 
2427 
2428 template< class Node >
2429 bool
2430 MDMap< Node >::
2431 isPad(const Teuchos::ArrayView< dim_type > & index) const
2432 {
2433  bool result = false;
2434  for (int axis = 0; axis < numDims(); ++axis)
2435  {
2436  if (index[axis] < getLowerPadSize(axis))
2437  result = true;
2438  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2439  result = true;
2440  }
2441  return result;
2442 }
2443 
2445 
2446 template< class Node >
2447 bool
2448 MDMap< Node >::
2449 isCommPad(const Teuchos::ArrayView< dim_type > & index) const
2450 {
2451  bool result = false;
2452  for (int axis = 0; axis < numDims(); ++axis)
2453  {
2454  // Check the ranks of the lower and upper neighbor processors. If
2455  // either of these values is non-negative, then we are on a
2456  // processor that contains communication padding
2457  if (getLowerNeighbor(axis) >= 0)
2458  {
2459  if (index[axis] < getLowerPadSize(axis))
2460  result = true;
2461  }
2462  if (getUpperNeighbor(axis) >= 0)
2463  {
2464  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2465  result = true;
2466  }
2467  }
2468  return result;
2469 }
2470 
2472 
2473 template< class Node >
2474 bool
2475 MDMap< Node >::
2476 isBndryPad(const Teuchos::ArrayView< dim_type > & index) const
2477 {
2478  bool result = false;
2479  for (int axis = 0; axis < numDims(); ++axis)
2480  {
2481  // Check the ranks of the lower and upper neighbor processors. If
2482  // either of these values is -1, then we are on a processor that
2483  // contains a boundary
2484  if (getLowerNeighbor(axis) == -1)
2485  {
2486  if (index[axis] < getLowerPadSize(axis))
2487  result = true;
2488  }
2489  if (getUpperNeighbor(axis) == -1)
2490  {
2491  if (index[axis] >= getLocalDim(axis,true) - getUpperPadSize(axis))
2492  result = true;
2493  }
2494  }
2495  return result;
2496 }
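// A minimal sketch of how the three predicates above relate, assuming a
// non-periodic 1-D map split over two processes with commPad = 1 and
// bndryPad = 2: on process 0 the lower neighbor is -1 and the upper
// neighbor is process 1, so local indices 0 and 1 (the lower padding)
// satisfy isBndryPad(), the uppermost local index satisfies isCommPad(),
// and isPad() is true for all three of those indices.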
2497 
2499 
2500 template< class Node >
2501 bool
2502 MDMap< Node >::isReplicatedBoundary(int axis) const
2503 {
2504  return _mdComm->isPeriodic(axis) && bool(_replicatedBoundary[axis]);
2505 }
2506 
2508 
2509 template< class Node >
2510 Layout
2511 MDMap< Node >::getLayout() const
2512 {
2513  return _layout;
2514 }
2515 
2517 
2518 template< class Node >
2519 Teuchos::RCP< Node >
2520 MDMap< Node >::getNode() const
2521 {
2522  return _node;
2523 }
2524 
2526 
2527 template< class Node >
2528 Teuchos::ArrayView< Teuchos::RCP< const MDMap< Node > > >
2529 MDMap< Node >::getAxisMaps() const
2530 {
2531  if (_axisMaps.size() == 0) _axisMaps.resize(numDims());
2532  for (int axis = 0; axis < numDims(); ++axis)
2533  if (_axisMaps[axis].is_null()) getAxisMap(axis);
2534  return _axisMaps();
2535 }
2536 
2538 
2539 template< class Node >
2540 Teuchos::RCP< const MDMap< Node > >
2541 MDMap< Node >::getAxisMap(int axis) const
2542 {
2543 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2544  TEUCHOS_TEST_FOR_EXCEPTION(
2545  ((axis < 0) || (axis >= numDims())),
2546  RangeError,
2547  "invalid axis index = " << axis << " (number of dimensions = " <<
2548  numDims() << ")");
2549 #endif
2550  if (_axisMaps.size() == 0) _axisMaps.resize(numDims());
2551  if (_axisMaps[axis].is_null())
2552  {
2553  Teuchos::RCP< const MDComm > axisComm = _mdComm->getAxisComm(axis);
2554  Domi::dim_type axisDim = _globalDims[axis] - 2*_bndryPadSizes[axis];
2555  _axisMaps[axis] =
2556  Teuchos::rcp(new MDMap(axisComm,
2557  Teuchos::tuple(axisDim),
2558  Teuchos::tuple(_commPadSizes[axis]),
2559  Teuchos::tuple(_bndryPadSizes[axis]),
2560  Teuchos::tuple(_replicatedBoundary[axis]),
2561  _layout));
2562  }
2563  return _axisMaps[axis];
2564 }
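// A usage sketch, assuming an existing Teuchos::RCP< const MDMap<> > named
// mdMap (the variable names are illustrative only): extract the
// one-dimensional map along axis 0 and query its global dimension.
//
//   Teuchos::RCP< const Domi::MDMap<> > xMap = mdMap->getAxisMap(0);
//   Domi::dim_type nx = xMap->getGlobalDim(0);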
2565 
2567 
2568 template< class Node >
2569 Teuchos::RCP< const MDMap< Node > >
2570 MDMap< Node >::getAugmentedMDMap(const dim_type leadingDim,
2571  const dim_type trailingDim) const
2572 {
2573  // Construct the new MDMap
2574  MDMap< Node > * newMdMap = new MDMap< Node >(*this);
2575 
2576  // Compute the number of new dimensions
2577  int newNumDims = 0;
2578  if (leadingDim > 0) ++newNumDims;
2579  if (trailingDim > 0) ++newNumDims;
2580 
2581  // Trivial result
2582  if (newNumDims == 0) return Teuchos::rcp(newMdMap);
2583 
2584  // Compute the new MDComm
2585  int oldNumDims = numDims();
2586  Teuchos::Array< int > newCommDims(oldNumDims);
2587  Teuchos::Array< int > newPeriodic(oldNumDims);
2588  Teuchos::Array< int > newReplicatedBndry(oldNumDims);
2589 
2590  for (int axis = 0; axis < oldNumDims; ++axis)
2591  {
2592  newCommDims[axis] = getCommDim(axis);
2593  newPeriodic[axis] = int(isPeriodic(axis));
2594  newReplicatedBndry[axis] = int(isReplicatedBoundary(axis));
2595  }
2596  if (leadingDim > 0)
2597  {
2598  newCommDims.insert(newCommDims.begin(),1);
2599  newPeriodic.insert(newPeriodic.begin(),0);
2600  newReplicatedBndry.insert(newReplicatedBndry.begin(),0);
2601  }
2602  if (trailingDim > 0)
2603  {
2604  newCommDims.push_back(1);
2605  newPeriodic.push_back(0);
2606  newReplicatedBndry.push_back(0);
2607  }
2608  newMdMap->_mdComm = Teuchos::rcp(new MDComm(getTeuchosComm(),
2609  newCommDims,
2610  newPeriodic));
2611  newMdMap->_replicatedBoundary = newReplicatedBndry;
2612 
2613  // Adjust new MDMap arrays for a new leading dimension
2614  Slice slice = Slice(leadingDim);
2615  padding_type pad(Teuchos::tuple(0,0));
2616  if (leadingDim > 0)
2617  {
2618  newMdMap->_globalDims.insert(newMdMap->_globalDims.begin(), leadingDim);
2619  newMdMap->_globalBounds.insert(newMdMap->_globalBounds.begin(), slice);
2620  newMdMap->_globalRankBounds.insert(newMdMap->_globalRankBounds.begin(),
2621  Teuchos::Array< Slice >(1,slice));
2622  newMdMap->_localDims.insert(newMdMap->_localDims.begin(), leadingDim);
2623  newMdMap->_localBounds.insert(newMdMap->_localBounds.begin(), slice);
2624  newMdMap->_commPadSizes.insert(newMdMap->_commPadSizes.begin(),0);
2625  newMdMap->_pad.insert(newMdMap->_pad.begin(), pad);
2626  newMdMap->_bndryPadSizes.insert(newMdMap->_bndryPadSizes.begin(),0);
2627  newMdMap->_bndryPad.insert(newMdMap->_bndryPad.begin(), pad);
2628  }
2629 
2630  // Adjust new MDMap arrays for a new trailing dimension
2631  slice = Slice(trailingDim);
2632  if (trailingDim > 0)
2633  {
2634  newMdMap->_globalDims.push_back(trailingDim);
2635  newMdMap->_globalBounds.push_back(slice);
2636  newMdMap->_globalRankBounds.push_back(Teuchos::Array< Slice >(1,slice));
2637  newMdMap->_localDims.push_back(trailingDim);
2638  newMdMap->_localBounds.push_back(slice);
2639  newMdMap->_commPadSizes.push_back(0);
2640  newMdMap->_pad.push_back(pad);
2641  newMdMap->_bndryPadSizes.push_back(0);
2642  newMdMap->_bndryPad.push_back(pad);
2643  }
2644 
2645  // Compute the new stride related data
2646  newMdMap->_globalStrides =
2647  computeStrides< size_type, dim_type >(newMdMap->_globalDims,
2648  newMdMap->_layout);
2649  newMdMap->_localStrides =
2650  computeStrides< size_type, dim_type >(newMdMap->_localDims,
2651  newMdMap->_layout);
2652  newMdMap->_globalMin = 0;
2653  newMdMap->_globalMax = 0;
2654  newMdMap->_localMin = 0;
2655  newMdMap->_localMax = 0;
2656  for (int axis = 0; axis < oldNumDims + newNumDims; ++axis)
2657  {
2658  newMdMap->_globalMin += newMdMap->_globalBounds[axis].start() *
2659  newMdMap->_globalStrides[axis];
2660  newMdMap->_globalMax += newMdMap->_globalBounds[axis].stop() *
2661  newMdMap->_globalStrides[axis];
2662  newMdMap->_localMin += newMdMap->_localBounds[axis].start() *
2663  newMdMap->_localStrides[axis];
2664  newMdMap->_localMax += newMdMap->_localBounds[axis].stop() *
2665  newMdMap->_localStrides[axis];
2666  }
2667 
2668  // Return the result
2669  return Teuchos::rcp(newMdMap);
2670 }
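// A usage sketch, assuming an existing RCP named mdMap: augment a map with
// a trailing dimension of 3 degrees of freedom per grid point.  The new
// trailing axis is not distributed (its commDim is 1) and carries no
// communication or boundary padding.
//
//   Teuchos::RCP< const Domi::MDMap<> > dofMap =
//     mdMap->getAugmentedMDMap(0, 3);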
2671 
2673 
2674 #ifdef HAVE_EPETRA
2675 
2676 template< class Node >
2677 Teuchos::RCP< const Epetra_Map >
2678 MDMap< Node >::getEpetraMap(bool withCommPad) const
2679 {
2680  if (withCommPad)
2681  {
2682  if (_epetraMap.is_null())
2683  {
2684  // Check if the maximum global ID is larger than what an int can
2685  // hold (because Epetra uses int ordinals)
2686  TEUCHOS_TEST_FOR_EXCEPTION(
2687    computeSize(_globalDims) - 1 > std::numeric_limits< int >::max(),
2688    MapOrdinalError,
2689    "The maximum global ID of this MDMap is too large for an Epetra_Map");
2690 
2691  // Allocate the myElements MDArray and the index array
2692  int num_dims = numDims();
2693  Teuchos::Array<dim_type> localDims(num_dims);
2694  for (int axis = 0; axis < num_dims; ++axis)
2695  localDims[axis] = _localDims[axis];
2696  MDArray<int> myElements(localDims);
2697  Teuchos::Array<int> index(num_dims);
2698 
2699  // Iterate over the local MDArray and assign global IDs
2700  for (MDArray<int>::iterator it = myElements.begin();
2701  it != myElements.end(); ++it)
2702  {
2703  int globalID = 0;
2704  for (int axis = 0; axis < num_dims; ++axis)
2705  {
2706  int axisRank = getCommIndex(axis);
2707  int start = _globalRankBounds[axis][axisRank].start() -
2708  _pad[axis][0];
2709  globalID += (start + it.index(axis)) * _globalStrides[axis];
2710  }
2711  *it = globalID;
2712  }
2713 
2714  // Construct the Epetra_Map
2715  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2716  _epetraMap = Teuchos::rcp(new Epetra_Map(-1,
2717  myElements.size(),
2718  myElements.getRawPtr(),
2719  0,
2720  *epetraComm));
2721  }
2722  return _epetraMap;
2723  }
2724  else
2725  {
2726  if (_epetraOwnMap.is_null())
2727  {
2728  // Check if the maximum global ID is larger than what an int can
2729  // hold (because Epetra uses int ordinals)
2730  if (computeSize(_globalDims) - 1 > std::numeric_limits< int >::max())
2731  throw MapOrdinalError("The maximum global ID of this MDMap is too "
2732  "large for an Epetra_Map");
2733 
2734  // Allocate the myElements MDArray and the index array
2735  int num_dims = numDims();
2736  Teuchos::Array<int> index(num_dims);
2737  Teuchos::Array<dim_type> myDims(num_dims);
2738  for (int axis = 0; axis < num_dims; ++axis)
2739  {
2740  myDims[axis] = _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2741  int axisRank = getCommIndex(axis);
2742  if (axisRank == 0)
2743  myDims[axis] += _bndryPad[axis][0];
2744  if (axisRank == getCommDim(axis)-1)
2745  myDims[axis] += _bndryPad[axis][1];
2746  }
2747  MDArray<int> myElements(myDims());
2748 
2749  // Iterate over the local MDArray and assign global IDs
2750  for (MDArray<int>::iterator it = myElements.begin();
2751  it != myElements.end(); ++it)
2752  {
2753  int globalID = 0;
2754  for (int axis = 0; axis < num_dims; ++axis)
2755  {
2756  int axisRank = getCommIndex(axis);
2757  int start = _globalRankBounds[axis][axisRank].start();
2758  if (axisRank == 0)
2759  start -= _bndryPad[axis][0];
2760  if (axisRank == getCommDim(axis)-1)
2761  start += _bndryPad[axis][1];
2762  globalID += (start + it.index(axis)) * _globalStrides[axis];
2763  }
      *it = globalID;
2764  }
2765 
2766  // Construct the Epetra_Map
2767  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2768  _epetraOwnMap = Teuchos::rcp(new Epetra_Map(-1,
2769  myElements.size(),
2770  myElements.getRawPtr(),
2771  0,
2772  *epetraComm));
2773  }
2774  return _epetraOwnMap;
2775  }
2776 }
2777 
2778 #endif
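// A usage sketch, assuming an existing RCP named mdMap: the boolean
// argument selects between a map that includes communication padding and
// one that covers only owned elements (plus boundary padding on processes
// at the domain edges).  Both use 32-bit Epetra ordinals, which is why the
// MapOrdinalError check above is needed.
//
//   Teuchos::RCP< const Epetra_Map > paddedMap = mdMap->getEpetraMap(true);
//   Teuchos::RCP< const Epetra_Map > ownedMap  = mdMap->getEpetraMap(false);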
2779 
2781 
2782 #ifdef HAVE_EPETRA
2783 
2784 template< class Node >
2785 Teuchos::RCP< const Epetra_Map >
2786 MDMap< Node >::
2787 getEpetraAxisMap(int axis,
2788  bool withCommPad) const
2789 {
2790 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2791  TEUCHOS_TEST_FOR_EXCEPTION(
2792  ((axis < 0) || (axis >= numDims())),
2793  RangeError,
2794  "invalid axis index = " << axis << " (number of dimensions = " <<
2795  numDims() << ")");
2796 #endif
2797  if ((withCommPad && (_epetraAxisMaps.size() == 0)) ||
2798  (not withCommPad && (_epetraAxisOwnMaps.size() == 0)))
2799  {
2800  int num_dims = numDims();
2801  Teuchos::RCP< const Epetra_Comm > epetraComm = _mdComm->getEpetraComm();
2802  for (int axis=0; axis < num_dims; ++axis)
2803  {
2804  Teuchos::Array<int> elements(getLocalDim(axis, withCommPad));
2805  int start = getGlobalRankBounds(axis,true).start();
2806  if (withCommPad && (getCommIndex(axis) != 0)) start -= _pad[axis][0];
2807  for (int i = 0; i < elements.size(); ++i)
2808  elements[i] = i + start;
2809  if (withCommPad)
2810  {
2811  _epetraAxisMaps.push_back(
2812  Teuchos::rcp(new Epetra_Map(-1,
2813  elements.size(),
2814  elements.getRawPtr(),
2815  0,
2816  *epetraComm)));
2817  }
2818  else
2819  {
2820  _epetraAxisOwnMaps.push_back(
2821  Teuchos::rcp(new Epetra_Map(-1,
2822  elements.size(),
2823  elements.getRawPtr(),
2824  0,
2825  *epetraComm)));
2826  }
2827  }
2828  }
2829 
2830  if (withCommPad)
2831  return _epetraAxisMaps[axis];
2832  else
2833  return _epetraAxisOwnMaps[axis];
2834 }
2835 
2836 #endif
2837 
2839 
2840 #ifdef HAVE_TPETRA
2841 
2842 template< class Node >
2843 template< class LocalOrdinal,
2844  class GlobalOrdinal,
2845  class Node2 >
2846 Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
2847 MDMap< Node >::getTpetraMap(bool withCommPad) const
2848 {
2849  if (withCommPad)
2850  {
2851  // Allocate the elementsMDArray and the index array
2852  int num_dims = numDims();
2853  Teuchos::Array<dim_type> localDims(num_dims);
2854  for (int axis = 0; axis < num_dims; ++axis)
2855  localDims[axis] = _localDims[axis];
2856  MDArray< GlobalOrdinal > elementMDArray(localDims);
2857  Teuchos::Array< LocalOrdinal > index(num_dims);
2858 
2859  // Iterate over the local MDArray and assign global IDs
2860  for (typename MDArray< GlobalOrdinal >::iterator it = elementMDArray.begin();
2861  it != elementMDArray.end(); ++it)
2862  {
2863  GlobalOrdinal globalID = 0;
2864  for (int axis = 0; axis < num_dims; ++axis)
2865  {
2866  int axisRank = getCommIndex(axis);
2867  GlobalOrdinal start = _globalRankBounds[axis][axisRank].start() -
2868  _pad[axis][0];
2869  globalID += (start + it.index(axis)) * _globalStrides[axis];
2870  }
2871  *it = globalID;
2872  }
2873 
2874  // Return the Tpetra::Map
2875  const Teuchos::Array< GlobalOrdinal > & myElements =
2876  elementMDArray.array();
2877  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2878  _mdComm->getTeuchosComm();
2879  return
2880  Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2881  GlobalOrdinal,
2882  Node2 >(Teuchos::OrdinalTraits<
2883  Tpetra::global_size_t>::invalid(),
2884  myElements(),
2885  0,
2886  teuchosComm));
2887  }
2888  else
2889  {
2890  // Allocate the elementMDArray MDArray and the index array
2891  int num_dims = numDims();
2892  Teuchos::Array< LocalOrdinal > index(num_dims);
2893  Teuchos::Array< dim_type > myDims(num_dims);
2894  for (int axis = 0; axis < num_dims; ++axis)
2895  {
2896  myDims[axis] =
2897  _localDims[axis] - _pad[axis][0] - _pad[axis][1];
2898  int axisRank = getCommIndex(axis);
2899  if (axisRank == 0)
2900  myDims[axis] += _bndryPad[axis][0];
2901  if (axisRank == getCommDim(axis)-1)
2902  myDims[axis] += _bndryPad[axis][1];
2903  }
2904  MDArray< GlobalOrdinal > elementMDArray(myDims());
2905 
2906  // Iterate over the local MDArray and assign global IDs
2907  for (typename MDArray< GlobalOrdinal >::iterator it = elementMDArray.begin();
2908  it != elementMDArray.end(); ++it)
2909  {
2910  GlobalOrdinal globalID = 0;
2911  for (int axis = 0; axis < num_dims; ++axis)
2912  {
2913  int axisRank = getCommIndex(axis);
2914  GlobalOrdinal start = _globalRankBounds[axis][axisRank].start();
2915  if (axisRank == 0)
2916  start -= _bndryPad[axis][0];
2917  if (axisRank == getCommDim(axis)-1)
2918  start += _bndryPad[axis][1];
2919  globalID += (start + it.index(axis)) * _globalStrides[axis];
2920  }
      *it = globalID;
2921  }
2922 
2923  // Return the Tpetra::Map
2924  const Teuchos::Array< GlobalOrdinal> & myElements =
2925  elementMDArray.array();
2926  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2927  _mdComm->getTeuchosComm();
2928  return
2929  Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2930  GlobalOrdinal,
2931                                  Node2 >(Teuchos::OrdinalTraits<
2932  Tpetra::global_size_t>::invalid(),
2933  myElements(),
2934  0,
2935  teuchosComm));
2936  }
2937 }
2938 #endif
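// A usage sketch, with ordinal typedefs chosen only for illustration.
// Because getTpetraMap() is a member template, the "template" keyword is
// required when mdMap's type is itself a template parameter.
//
//   typedef int       LO;   // assumed local ordinal type
//   typedef long long GO;   // assumed global ordinal type
//   Teuchos::RCP< const Tpetra::Map< LO, GO, Node > > tpetraMap =
//     mdMap->template getTpetraMap< LO, GO, Node >(true);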
2939 
2941 
2942 #ifdef HAVE_TPETRA
2943 
2944 template< class Node >
2945 template< class LocalOrdinal,
2946  class GlobalOrdinal,
2947  class Node2 >
2948 Teuchos::RCP< const Tpetra::Map< LocalOrdinal, GlobalOrdinal, Node2 > >
2949 MDMap< Node >::
2950 getTpetraAxisMap(int axis,
2951  bool withCommPad) const
2952 {
2953 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2954  TEUCHOS_TEST_FOR_EXCEPTION(
2955  ((axis < 0) || (axis >= numDims())),
2956  RangeError,
2957  "invalid axis index = " << axis << " (number of dimensions = " <<
2958  numDims() << ")");
2959 #endif
2960  int num_dims = numDims();
2961  Teuchos::RCP< const Teuchos::Comm< int > > teuchosComm =
2962  _mdComm->getTeuchosComm();
2963  Teuchos::Array< GlobalOrdinal > elements(getLocalDim(axis,withCommPad));
2964  GlobalOrdinal start = getGlobalRankBounds(axis,true).start();
2965  if (withCommPad && (getCommIndex(axis) != 0)) start -= _pad[axis][0];
2966  for (LocalOrdinal i = 0; i < elements.size(); ++i)
2967  elements[i] = i + start;
2968  return Teuchos::rcp(new Tpetra::Map< LocalOrdinal,
2969  GlobalOrdinal,
2970                                 Node2 >(Teuchos::OrdinalTraits<
2971  Tpetra::global_size_t>::invalid(),
2972  elements,
2973  0,
2974  teuchosComm));
2975 }
2976 #endif
2977 
2979 
2980 template< class Node >
2981 Teuchos::Array< dim_type >
2982 MDMap< Node >::
2983 getGlobalIndex(size_type globalID) const
2984 {
2985 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
2986  TEUCHOS_TEST_FOR_EXCEPTION(
2987  ((globalID < _globalMin) || (globalID >= _globalMax)),
2988  RangeError,
2989  "invalid global index = " << globalID << " (should be between " <<
2990  _globalMin << " and " << _globalMax << ")");
2991 #endif
2992  int num_dims = numDims();
2993  Teuchos::Array< dim_type > result(num_dims);
2994  size_type index = globalID;
2995  if (_layout == LAST_INDEX_FASTEST)
2996  {
2997  for (int axis = 0; axis < num_dims-1; ++axis)
2998  {
2999  result[axis] = index / _globalStrides[axis];
3000  index = index % _globalStrides[axis];
3001  }
3002  result[num_dims-1] = index;
3003  }
3004  else
3005  {
3006  for (int axis = num_dims-1; axis > 0; --axis)
3007  {
3008  result[axis] = index / _globalStrides[axis];
3009  index = index % _globalStrides[axis];
3010  }
3011  result[0] = index;
3012  }
3013  return result;
3014 }
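// A worked example of the stride arithmetic above, with values chosen only
// for illustration: for two global dimensions (4, 6) and LAST_INDEX_FASTEST
// layout, _globalStrides is (6, 1).  Global ID 15 decomposes as 15 / 6 = 2
// with remainder 3, giving the global index (2, 3); getGlobalID() below
// reverses the computation via 2*6 + 3*1 = 15.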
3015 
3017 
3018 template< class Node >
3019 Teuchos::Array< dim_type >
3020 MDMap< Node >::
3021 getLocalIndex(size_type localID) const
3022 {
3023 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3024  TEUCHOS_TEST_FOR_EXCEPTION(
3025  ((localID < _localMin) || (localID >= _localMax)),
3026  RangeError,
3027  "invalid local index = " << localID << " (should be between " <<
3028  _localMin << " and " << _localMax << ")");
3029 #endif
3030  int num_dims = numDims();
3031  Teuchos::Array< dim_type > result(num_dims);
3032  size_type index = localID;
3033  if (_layout == LAST_INDEX_FASTEST)
3034  {
3035  for (int axis = 0; axis < num_dims-1; ++axis)
3036  {
3037  result[axis] = index / _localStrides[axis];
3038  index = index % _localStrides[axis];
3039  }
3040  result[num_dims-1] = index;
3041  }
3042  else
3043  {
3044  for (int axis = num_dims-1; axis > 0; --axis)
3045  {
3046  result[axis] = index / _localStrides[axis];
3047  index = index % _localStrides[axis];
3048  }
3049  result[0] = index;
3050  }
3051  return result;
3052 }
3053 
3055 
3056 template< class Node >
3057 size_type
3058 MDMap< Node >::
3059 getGlobalID(size_type localID) const
3060 {
3061 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3062  TEUCHOS_TEST_FOR_EXCEPTION(
3063  ((localID < 0) || (localID >= _localMax)),
3064  RangeError,
3065  "invalid local index = " << localID << " (local size = " <<
3066  _localMax << ")");
3067 #endif
3068  Teuchos::Array< dim_type > localIndex = getLocalIndex(localID);
3069  size_type result = 0;
3070  for (int axis = 0; axis < numDims(); ++axis)
3071  {
3072  dim_type globalIndex = localIndex[axis] +
3073  _globalRankBounds[axis][getCommIndex(axis)].start() - _pad[axis][0];
3074  result += globalIndex * _globalStrides[axis];
3075  }
3076  return result;
3077 }
3078 
3080 
3081 template< class Node >
3082 size_type
3083 MDMap< Node >::
3084 getGlobalID(const Teuchos::ArrayView< dim_type > & globalIndex) const
3085 {
3086 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3087  TEUCHOS_TEST_FOR_EXCEPTION(
3088    (globalIndex.size() != numDims()),
3089    InvalidArgument,
3090    "globalIndex has " << globalIndex.size() << " entries; expecting "
3091  << numDims());
3092  for (int axis = 0; axis < numDims(); ++axis)
3093  {
3094  TEUCHOS_TEST_FOR_EXCEPTION(
3095  ((globalIndex[axis] < 0) ||
3096  (globalIndex[axis] >= _globalDims[axis])),
3097  RangeError,
3098  "invalid globalIndex[" << axis << "] = " << globalIndex[axis] <<
3099  " (global dimension = " << _globalDims[axis] << ")");
3100  }
3101 #endif
3102  size_type result = 0;
3103  for (int axis = 0; axis < numDims(); ++axis)
3104  result += globalIndex[axis] * _globalStrides[axis];
3105  return result;
3106 }
3107 
3109 
3110 template< class Node >
3111 size_type
3112 MDMap< Node >::
3113 getLocalID(size_type globalID) const
3114 {
3115 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3116  TEUCHOS_TEST_FOR_EXCEPTION(
3117  ((globalID < _globalMin) || (globalID >= _globalMax)),
3118  RangeError,
3119  "invalid global index = " << globalID << " (should be between " <<
3120  _globalMin << " and " << _globalMax << ")");
3121 #endif
3122  Teuchos::Array< dim_type > globalIndex =
3123  getGlobalIndex(globalID);
3124  size_type result = 0;
3125  for (int axis = 0; axis < numDims(); ++axis)
3126  {
3127  dim_type localIndex = globalIndex[axis] -
3128  _globalRankBounds[axis][getCommIndex(axis)].start() + _pad[axis][0];
3129  TEUCHOS_TEST_FOR_EXCEPTION(
3130  (localIndex < 0 || localIndex >= _localDims[axis]),
3131  RangeError,
3132       "global index not on local processor");
3133  result += localIndex * _localStrides[axis];
3134  }
3135  return result;
3136 }
3137 
3139 
3140 template< class Node >
3141 size_type
3142 MDMap< Node >::
3143 getLocalID(const Teuchos::ArrayView< dim_type > & localIndex) const
3144 {
3145 #ifdef HAVE_DOMI_ARRAY_BOUNDSCHECK
3146  TEUCHOS_TEST_FOR_EXCEPTION(
3147    (localIndex.size() != numDims()),
3148    InvalidArgument,
3149    "localIndex has " << localIndex.size() << " entries; expecting "
3150  << numDims());
3151  for (int axis = 0; axis < numDims(); ++axis)
3152  {
3153  TEUCHOS_TEST_FOR_EXCEPTION(
3154  ((localIndex[axis] < 0) ||
3155  (localIndex[axis] >= _localDims[axis])),
3156  RangeError,
3157  "invalid localIndex[" << axis << "] = " << localIndex[axis] <<
3158  " (local dimension = " << _localDims[axis] << ")");
3159  }
3160 #endif
3161  size_type result = 0;
3162  for (int axis = 0; axis < numDims(); ++axis)
3163  result += localIndex[axis] * _localStrides[axis];
3164  return result;
3165 }
3166 
3168 
3169 template< class Node >
3170 bool
3171 MDMap< Node >::isCompatible(const MDMap< Node > & mdMap) const
3172 {
3173  // Trivial comparison. We assume that if the object pointers match
3174  // on this processor, then they match on all processors
3175  if (this == &mdMap) return true;
3176 
3177  // Check the number of dimensions. Assume this check produces the
3178  // same result on all processors
3179  int num_dims = numDims();
3180  if (num_dims != mdMap.numDims()) return false;
3181 
3182  // Check the commDims. Assume this check produces the same result
3183  // on all processes
3184  for (int axis = 0; axis < num_dims; ++axis)
3185  if (getCommDim(axis) != mdMap.getCommDim(axis)) return false;
3186 
3187  // Check the global dimensions. Assume this check produces the same
3188  // result on all processes
3189  if (_globalDims != mdMap._globalDims) return false;
3190 
3191  // Check the local dimensions. This needs to be checked locally on
3192  // each processor and then the results communicated to obtain global
3193  // result
3194  int localResult = 1;
3195  int globalResult = 1;
3196  for (int axis = 0; axis < num_dims; ++axis)
3197  if (getLocalDim(axis,false) != mdMap.getLocalDim(axis,false))
3198  localResult = 0;
3199  Teuchos::reduceAll(*(getTeuchosComm()),
3200  Teuchos::REDUCE_MIN,
3201  1,
3202  &localResult,
3203  &globalResult);
3204 
3205  // Return the result
3206  return bool(globalResult);
3207 }
3208 
3210 
3211 template< class Node >
3212 bool
3213 MDMap< Node >::isSameAs(const MDMap< Node > & mdMap,
3214                         const int verbose) const
3215 {
3216  // Trivial comparison. We assume that if the object pointers match
3217  // on this processor, then they match on all processors
3218  if (this == &mdMap) return true;
3219 
3220  // Start by setting a local result to true. We will perform a
3221  // number of tests, and if they fail, the local result will be set
3222  // to false. At the end, we will perform a global reduction to
3223  // obtain the global result.
3224  int localResult = 1;
3225  Teuchos::RCP< const Teuchos::Comm< int > > comm = getTeuchosComm();
3226  int rank = comm->getRank();
3227 
3228  // Check if MDMaps are compatible.
3229  if (! isCompatible(mdMap))
3230  {
3231  localResult = 0;
3232  if (verbose)
3233  std::cout << rank << ": MDMaps are incompatible" << std::endl;
3234  }
3235 
3236  // Check that underlying communicators are the same size
3237  if (comm->getSize() != mdMap.getTeuchosComm()->getSize())
3238  {
3239  localResult = 0;
3240  if (verbose)
3241  std::cout << rank << ": this Comm size = " << comm->getSize() << " != "
3242  << mdMap.getTeuchosComm()->getSize() << std::endl;
3243  }
3244 
3245  // Check that underlying communicators have the same rank
3246  if (rank != mdMap.getTeuchosComm()->getRank())
3247  {
3248  localResult = 0;
3249  if (verbose)
3250  std::cout << rank << ": this Comm rank = " << rank << " != "
3251  << mdMap.getTeuchosComm()->getRank() << std::endl;
3252  }
3253 
3254  // Check the global bounds.
3255  if (_globalBounds != mdMap._globalBounds)
3256  {
3257  localResult = 0;
3258  if (verbose)
3259  std::cout << rank << ": global bounds " << _globalBounds << " != "
3260  << mdMap._globalBounds << std::endl;
3261  }
3262 
3263  // Check the local dimensions.
3264  if (_localDims != mdMap._localDims)
3265  {
3266  localResult = 0;
3267  if (verbose)
3268  std::cout << rank << ": local dimensions " << _localDims << " != "
3269  << mdMap._localDims << std::endl;
3270  }
3271 
3272  // Obtain the global result
3273  int globalResult = 1;
3274  Teuchos::reduceAll(*(getTeuchosComm()),
3275  Teuchos::REDUCE_MIN,
3276  1,
3277  &localResult,
3278  &globalResult);
3279 
3280  // Return the result
3281  return bool(globalResult);
3282 }
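// A minimal sketch contrasting the two comparisons, assuming maps mdMapA
// and mdMapB: isCompatible() requires only matching decompositions (number
// of dimensions, commDims, global and local dimensions), while isSameAs()
// additionally checks communicator size and rank, global bounds, and local
// dimensions, printing a message for each failed test when verbose is
// nonzero.
//
//   bool compatible = mdMapA->isCompatible(*mdMapB);
//   bool identical  = mdMapA->isSameAs(*mdMapB, 1);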
3283 
3285 
3286 template< class Node >
3287 bool
3288 MDMap< Node >::isContiguous() const
3289 {
3290  // Compute the local strides if they were contiguous
3291  Teuchos::Array< size_type > contiguousStrides =
3292  computeStrides< size_type, dim_type >(_localDims, _layout);
3293 
3294  // Compute the local result: 0 = contiguous, 1 = non-contiguous
3295  int localResult = int(_localStrides != contiguousStrides);
3296 
3297  // Compute the global result
3298  int globalResult = 0;
3299  Teuchos::reduceAll(*(_mdComm->getTeuchosComm()),
3300  Teuchos::REDUCE_SUM,
3301  1,
3302  &localResult,
3303  &globalResult);
3304  return (globalResult == 0);
3305 }
3306 
3308 
3309 template< class Node >
3310 void
3311 MDMap< Node >::computeBounds()
3312 {
3313  // Initialization
3314  int num_dims = numDims();
3315 
3316  // Decompose the multi-dimensional domain
3317  for (int axis = 0; axis < num_dims; ++axis)
3318  {
3319  // Get the communicator info for this axis
3320  int commDim = getCommDim(axis);
3321  for (int axisRank = 0; axisRank < commDim; ++axisRank)
3322  {
3323  // First estimates assuming even division of global dimensions
3324  // by the number of processors along this axis, and ignoring
3325  // communication and boundary padding.
3326  dim_type localDim = (_globalDims[axis] - _bndryPad[axis][0] -
3327  _bndryPad[axis][1]) / commDim;
3328  dim_type axisStart = axisRank * localDim;
3329 
3330  // Adjustments for non-zero remainder. Compute the remainder
3331  // using the mod operator. If the remainder is > 0, then add an
3332  // element to the appropriate number of processors with the
3333  // highest axis ranks. Note that this is the opposite of the
3334  // standard Tpetra::Map constructor (which adds an elements to
3335  // the lowest processor ranks), and provides better balance for
3336  // finite differencing systems with staggered data location.
3337  dim_type remainder = (_globalDims[axis] - _bndryPad[axis][0] -
3338  _bndryPad[axis][1]) % commDim;
3339  if (commDim - axisRank - 1 < remainder)
3340  {
3341  ++localDim;
3342  axisStart += (remainder - commDim + axisRank);
3343  }
3344 
3345  // Global adjustment for boundary padding
3346  axisStart += _bndryPad[axis][0];
3347 
3348  // Compute and store the global axis bounds
3349  _globalRankBounds[axis].push_back(
3350  ConcreteSlice(axisStart, axisStart + localDim));
3351 
3352  // Set _localDims[axis] and _localBounds[axis] only if
3353  // axisRank equals the axis rank of this processor
3354  if (axisRank == getCommIndex(axis))
3355  {
3356  // Local adjustment for padding. Note that _pad should
3357  // already be corrected to be either the communication padding
3358  // or boundary padding as appropriate
3359  _localDims[axis] = localDim + _pad[axis][0] + _pad[axis][1];
3360 
3361  // Compute and store the axis bounds
3362  _localBounds.push_back(ConcreteSlice(_localDims[axis]));
3363  }
3364  }
3365  }
3366 }
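// A worked example of the remainder handling above, with values chosen only
// for illustration: decomposing 10 interior elements over commDim = 4 gives
// localDim = 2 with remainder 2, so the two highest axis ranks each receive
// one extra element.  Ignoring boundary padding, the resulting rank bounds
// are [0,2), [2,4), [4,7), and [7,10).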
3367 
3368 }
3369 
3370 #endif