xref: /petsc/src/dm/impls/plex/plexcheckinterface.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 #include <petsc/private/dmpleximpl.h> /*I      "petscdmplex.h"   I*/
2 
3 /* TODO PetscArrayExchangeBegin/End */
4 /* TODO blocksize */
5 /* TODO move to API ? */
/* Point-to-point exchange of variable-length arrays keyed by rank.
   For each r, sends ssize[r] entries of sarr[r] (interpreted with MPI datatype dt) to rank sranks[r],
   and receives one array from each rank rranks[r]; the receive lengths are exchanged first so the
   receive buffers can be sized. On return, *rsize_out[r] and (*rarr_out)[r] hold the length and data
   received from rranks[r]. Ownership: caller frees each (*rarr_out)[r] with PetscFree() and then the
   pair (*rsize_out, *rarr_out) with PetscFree2(). Collective over the communicator of obj. */
static PetscErrorCode ExchangeArrayByRank_Private(PetscObject obj, MPI_Datatype dt, PetscInt nsranks, const PetscMPIInt sranks[], PetscInt ssize[], const void *sarr[], PetscInt nrranks, const PetscMPIInt rranks[], PetscInt *rsize_out[], void **rarr_out[]) {
  PetscInt     r;
  PetscInt    *rsize;
  void       **rarr;
  MPI_Request *sreq, *rreq;
  PetscMPIInt  tag, unitsize;
  MPI_Comm     comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Type_size(dt, &unitsize));
  PetscCall(PetscObjectGetComm(obj, &comm));
  PetscCall(PetscMalloc2(nrranks, &rsize, nrranks, &rarr));
  PetscCall(PetscMalloc2(nrranks, &rreq, nsranks, &sreq));
  /* exchange array size */
  /* a fresh tag keeps this round of messages separate from any other traffic on comm */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) { PetscCallMPI(MPI_Irecv(&rsize[r], 1, MPIU_INT, rranks[r], tag, comm, &rreq[r])); }
  for (r = 0; r < nsranks; r++) { PetscCallMPI(MPI_Isend(&ssize[r], 1, MPIU_INT, sranks[r], tag, comm, &sreq[r])); }
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  /* exchange array */
  /* second tag distinguishes the data round from the size round above */
  PetscCall(PetscObjectGetNewTag(obj, &tag));
  for (r = 0; r < nrranks; r++) {
    /* receive buffer sized from the length just received; rsize[r] counts dt units */
    PetscCall(PetscMalloc(rsize[r] * unitsize, &rarr[r]));
    PetscCallMPI(MPI_Irecv(rarr[r], rsize[r], dt, rranks[r], tag, comm, &rreq[r]));
  }
  for (r = 0; r < nsranks; r++) { PetscCallMPI(MPI_Isend(sarr[r], ssize[r], dt, sranks[r], tag, comm, &sreq[r])); }
  PetscCallMPI(MPI_Waitall(nrranks, rreq, MPI_STATUSES_IGNORE));
  PetscCallMPI(MPI_Waitall(nsranks, sreq, MPI_STATUSES_IGNORE));
  PetscCall(PetscFree2(rreq, sreq));
  *rsize_out = rsize;
  *rarr_out  = rarr;
  PetscFunctionReturn(0);
}
39 
40 /* TODO VecExchangeBegin/End */
41 /* TODO move to API ? */
42 static PetscErrorCode ExchangeVecByRank_Private(PetscObject obj, PetscInt nsranks, const PetscMPIInt sranks[], Vec svecs[], PetscInt nrranks, const PetscMPIInt rranks[], Vec *rvecs[]) {
43   PetscInt            r;
44   PetscInt           *ssize, *rsize;
45   PetscScalar       **rarr;
46   const PetscScalar **sarr;
47   Vec                *rvecs_;
48   MPI_Request        *sreq, *rreq;
49 
50   PetscFunctionBegin;
51   PetscCall(PetscMalloc4(nsranks, &ssize, nsranks, &sarr, nrranks, &rreq, nsranks, &sreq));
52   for (r = 0; r < nsranks; r++) {
53     PetscCall(VecGetLocalSize(svecs[r], &ssize[r]));
54     PetscCall(VecGetArrayRead(svecs[r], &sarr[r]));
55   }
56   PetscCall(ExchangeArrayByRank_Private(obj, MPIU_SCALAR, nsranks, sranks, ssize, (const void **)sarr, nrranks, rranks, &rsize, (void ***)&rarr));
57   PetscCall(PetscMalloc1(nrranks, &rvecs_));
58   for (r = 0; r < nrranks; r++) {
59     /* set array in two steps to mimic PETSC_OWN_POINTER */
60     PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, rsize[r], NULL, &rvecs_[r]));
61     PetscCall(VecReplaceArray(rvecs_[r], rarr[r]));
62   }
63   for (r = 0; r < nsranks; r++) { PetscCall(VecRestoreArrayRead(svecs[r], &sarr[r])); }
64   PetscCall(PetscFree2(rsize, rarr));
65   PetscCall(PetscFree4(ssize, sarr, rreq, sreq));
66   *rvecs = rvecs_;
67   PetscFunctionReturn(0);
68 }
69 
70 static PetscErrorCode SortByRemote_Private(PetscSF sf, PetscInt *rmine1[], PetscInt *rremote1[]) {
71   PetscInt           nleaves;
72   PetscInt           nranks;
73   const PetscMPIInt *ranks;
74   const PetscInt    *roffset, *rmine, *rremote;
75   PetscInt           n, o, r;
76 
77   PetscFunctionBegin;
78   PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));
79   nleaves = roffset[nranks];
80   PetscCall(PetscMalloc2(nleaves, rmine1, nleaves, rremote1));
81   for (r = 0; r < nranks; r++) {
82     /* simultaneously sort rank-wise portions of rmine & rremote by values in rremote
83        - to unify order with the other side */
84     o = roffset[r];
85     n = roffset[r + 1] - o;
86     PetscCall(PetscArraycpy(&(*rmine1)[o], &rmine[o], n));
87     PetscCall(PetscArraycpy(&(*rremote1)[o], &rremote[o], n));
88     PetscCall(PetscSortIntWithArray(n, &(*rremote1)[o], &(*rmine1)[o]));
89   }
90   PetscFunctionReturn(0);
91 }
92 
93 static PetscErrorCode GetRecursiveConeCoordinatesPerRank_Private(DM dm, PetscSF sf, PetscInt rmine[], Vec *coordinatesPerRank[]) {
94   IS                 pointsPerRank, conesPerRank;
95   PetscInt           nranks;
96   const PetscMPIInt *ranks;
97   const PetscInt    *roffset;
98   PetscInt           n, o, r;
99 
100   PetscFunctionBegin;
101   PetscCall(DMGetCoordinatesLocalSetUp(dm));
102   PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, NULL, NULL));
103   PetscCall(PetscMalloc1(nranks, coordinatesPerRank));
104   for (r = 0; r < nranks; r++) {
105     o = roffset[r];
106     n = roffset[r + 1] - o;
107     PetscCall(ISCreateGeneral(PETSC_COMM_SELF, n, &rmine[o], PETSC_USE_POINTER, &pointsPerRank));
108     PetscCall(DMPlexGetConeRecursiveVertices(dm, pointsPerRank, &conesPerRank));
109     PetscCall(DMGetCoordinatesLocalTuple(dm, conesPerRank, NULL, &(*coordinatesPerRank)[r]));
110     PetscCall(ISDestroy(&pointsPerRank));
111     PetscCall(ISDestroy(&conesPerRank));
112   }
113   PetscFunctionReturn(0);
114 }
115 
116 static PetscErrorCode PetscSFComputeMultiRootOriginalNumberingByRank_Private(PetscSF sf, PetscSF imsf, PetscInt *irmine1[]) {
117   PetscInt       *mRootsOrigNumbering;
118   PetscInt        nileaves, niranks;
119   const PetscInt *iroffset, *irmine, *degree;
120   PetscInt        i, n, o, r;
121 
122   PetscFunctionBegin;
123   PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
124   PetscCall(PetscSFGetRootRanks(imsf, &niranks, NULL, &iroffset, &irmine, NULL));
125   PetscCheck(nileaves == iroffset[niranks], PETSC_COMM_SELF, PETSC_ERR_PLIB, "nileaves != iroffset[niranks])");
126   PetscCall(PetscSFComputeDegreeBegin(sf, &degree));
127   PetscCall(PetscSFComputeDegreeEnd(sf, &degree));
128   PetscCall(PetscSFComputeMultiRootOriginalNumbering(sf, degree, NULL, &mRootsOrigNumbering));
129   PetscCall(PetscMalloc1(nileaves, irmine1));
130   for (r = 0; r < niranks; r++) {
131     o = iroffset[r];
132     n = iroffset[r + 1] - o;
133     for (i = 0; i < n; i++) (*irmine1)[o + i] = mRootsOrigNumbering[irmine[o + i]];
134   }
135   PetscCall(PetscFree(mRootsOrigNumbering));
136   PetscFunctionReturn(0);
137 }
138 
139 /*@
140   DMPlexCheckInterfaceCones - Check that points on inter-partition interfaces have conforming order of cone points.
141 
  Input Parameter:
143 . dm - The DMPlex object
144 
145   Notes:
  For example, if there is an edge (rank,index)=(0,2) connecting points cone(0,2)=[(0,0),(0,1)] in this order, and the point SF contains connections 0 <- (1,0), 1 <- (1,1) and 2 <- (1,2),
147   then this check would pass if the edge (1,2) has cone(1,2)=[(1,0),(1,1)]. By contrast, if cone(1,2)=[(1,1),(1,0)], then this check would fail.
148 
149   This is mainly intended for debugging/testing purposes. Does not check cone orientation, for this purpose use DMPlexCheckFaces().
150 
151   For the complete list of DMPlexCheck* functions, see DMSetFromOptions().
152 
153   Developer Note:
154   Interface cones are expanded into vertices and then their coordinates are compared.
155 
156   Level: developer
157 
158 .seealso: `DMPlexGetCone()`, `DMPlexGetConeSize()`, `DMGetPointSF()`, `DMGetCoordinates()`, `DMSetFromOptions()`
159 @*/
PetscErrorCode DMPlexCheckInterfaceCones(DM dm) {
  PetscSF            sf;
  PetscInt           nleaves, nranks, nroots;
  const PetscInt    *mine, *roffset, *rmine, *rremote;
  const PetscSFNode *remote;
  const PetscMPIInt *ranks;
  PetscSF            msf, imsf; /* multi-SF of the point SF and its inverse */
  PetscInt           nileaves, niranks;
  const PetscMPIInt *iranks;
  const PetscInt    *iroffset, *irmine, *irremote;
  PetscInt          *rmine1, *rremote1; /* rmine and rremote copies simultaneously sorted by rank and rremote */
  PetscInt          *mine_orig_numbering;
  Vec               *sntCoordinatesPerRank; /* cone-vertex coordinates we send, one Vec per root rank */
  Vec               *refCoordinatesPerRank; /* reference coordinates of points referenced by remote ranks */
  Vec               *recCoordinatesPerRank = NULL; /* coordinates received from remote ranks */
  PetscInt           r;
  PetscMPIInt        commsize, myrank;
  PetscBool          same;
  PetscBool          verbose = PETSC_FALSE;
  MPI_Comm           comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscObjectGetComm((PetscObject)dm, &comm));
  PetscCallMPI(MPI_Comm_rank(comm, &myrank));
  PetscCallMPI(MPI_Comm_size(comm, &commsize));
  /* there are no inter-partition interfaces on a single rank or without a point SF graph */
  if (commsize < 2) PetscFunctionReturn(0);
  PetscCall(DMGetPointSF(dm, &sf));
  if (!sf) PetscFunctionReturn(0);
  PetscCall(PetscSFGetGraph(sf, &nroots, &nleaves, &mine, &remote));
  if (nroots < 0) PetscFunctionReturn(0); /* SF graph has not been set */
  PetscCheck(dm->coordinates[0].x || dm->coordinates[0].xl, PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM coordinates must be set");
  PetscCall(PetscSFSetUp(sf));
  PetscCall(PetscSFGetRootRanks(sf, &nranks, &ranks, &roffset, &rmine, &rremote));

  /* Expand sent cones per rank */
  /* sort leaves by remote index so both sides of each interface traverse points in the same order */
  PetscCall(SortByRemote_Private(sf, &rmine1, &rremote1));
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, sf, rmine1, &sntCoordinatesPerRank));

  /* Create inverse SF */
  PetscCall(PetscSFGetMultiSF(sf, &msf));
  PetscCall(PetscSFCreateInverseSF(msf, &imsf));
  PetscCall(PetscSFSetUp(imsf));
  PetscCall(PetscSFGetGraph(imsf, NULL, &nileaves, NULL, NULL));
  PetscCall(PetscSFGetRootRanks(imsf, &niranks, &iranks, &iroffset, &irmine, &irremote));

  /* Compute original numbering of multi-roots (referenced points) */
  PetscCall(PetscSFComputeMultiRootOriginalNumberingByRank_Private(sf, imsf, &mine_orig_numbering));

  /* Expand coordinates of the referred cones per rank */
  PetscCall(GetRecursiveConeCoordinatesPerRank_Private(dm, imsf, mine_orig_numbering, &refCoordinatesPerRank));

  /* Send the coordinates */
  /* each rank ships its sent-cone coordinates to the ranks that reference those points */
  PetscCall(ExchangeVecByRank_Private((PetscObject)sf, nranks, ranks, sntCoordinatesPerRank, niranks, iranks, &recCoordinatesPerRank));

  /* verbose output */
  PetscCall(PetscOptionsGetBool(((PetscObject)dm)->options, ((PetscObject)dm)->prefix, "-dm_plex_check_cones_conform_on_interfaces_verbose", &verbose, NULL));
  if (verbose) {
    /* synchronized printing keeps per-rank output grouped; sub-viewers let each rank dump its sequential Vecs */
    PetscViewer sv, v = PETSC_VIEWER_STDOUT_WORLD;
    PetscCall(PetscViewerASCIIPrintf(v, "============\nDMPlexCheckInterfaceCones output\n============\n"));
    PetscCall(PetscViewerASCIIPushSynchronized(v));
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "[%d] --------\n", myrank));
    for (r = 0; r < nranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " ranks[r]=%d sntCoordinatesPerRank[r]:\n", r, ranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(sntCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < niranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d refCoordinatesPerRank[r]:\n", r, iranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(refCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  ----------\n"));
    for (r = 0; r < niranks; r++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(v, "  r=%" PetscInt_FMT " iranks[r]=%d recCoordinatesPerRank[r]:\n", r, iranks[r]));
      PetscCall(PetscViewerASCIIPushTab(v));
      PetscCall(PetscViewerGetSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(VecView(recCoordinatesPerRank[r], sv));
      PetscCall(PetscViewerRestoreSubViewer(v, PETSC_COMM_SELF, &sv));
      PetscCall(PetscViewerASCIIPopTab(v));
    }
    PetscCall(PetscViewerFlush(v));
    PetscCall(PetscViewerASCIIPopSynchronized(v));
  }

  /* Compare recCoordinatesPerRank with refCoordinatesPerRank */
  /* the actual check: received cone coordinates must equal our own reference expansion */
  for (r = 0; r < niranks; r++) {
    PetscCall(VecEqual(refCoordinatesPerRank[r], recCoordinatesPerRank[r], &same));
    PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_PLIB, "interface cones do not conform for remote rank %d", iranks[r]);
  }

  /* destroy sent stuff */
  for (r = 0; r < nranks; r++) { PetscCall(VecDestroy(&sntCoordinatesPerRank[r])); }
  PetscCall(PetscFree(sntCoordinatesPerRank));
  PetscCall(PetscFree2(rmine1, rremote1));
  PetscCall(PetscSFDestroy(&imsf));

  /* destroy referenced stuff */
  for (r = 0; r < niranks; r++) { PetscCall(VecDestroy(&refCoordinatesPerRank[r])); }
  PetscCall(PetscFree(refCoordinatesPerRank));
  PetscCall(PetscFree(mine_orig_numbering));

  /* destroy received stuff */
  for (r = 0; r < niranks; r++) { PetscCall(VecDestroy(&recCoordinatesPerRank[r])); }
  PetscCall(PetscFree(recCoordinatesPerRank));
  PetscFunctionReturn(0);
}
274