xref: /petsc/src/dm/impls/network/networkcreate.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 #define PETSCDM_DLL
2 #include <petsc/private/dmnetworkimpl.h> /*I   "petscdmnetwork.h"   I*/
3 #include <petsc/private/vecimpl.h>
4 
/* Process options from the options database for a DMNetwork.
   DMNetwork currently registers no options of its own; the HeadBegin/HeadEnd
   pair is kept so the "DMNetwork Options" section still appears in -help output. */
PetscErrorCode DMSetFromOptions_Network(DM dm, PetscOptionItems *PetscOptionsObject) {
  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "DMNetwork Options");
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
11 
12 /* External function declarations here */
13 extern PetscErrorCode DMCreateMatrix_Network(DM, Mat *);
14 extern PetscErrorCode DMDestroy_Network(DM);
15 extern PetscErrorCode DMView_Network(DM, PetscViewer);
16 extern PetscErrorCode DMGlobalToLocalBegin_Network(DM, Vec, InsertMode, Vec);
17 extern PetscErrorCode DMGlobalToLocalEnd_Network(DM, Vec, InsertMode, Vec);
18 extern PetscErrorCode DMLocalToGlobalBegin_Network(DM, Vec, InsertMode, Vec);
19 extern PetscErrorCode DMLocalToGlobalEnd_Network(DM, Vec, InsertMode, Vec);
20 extern PetscErrorCode DMSetUp_Network(DM);
21 extern PetscErrorCode DMClone_Network(DM, DM *);
22 
23 static PetscErrorCode VecArrayPrint_private(PetscViewer viewer, PetscInt n, const PetscScalar *xv) {
24   PetscInt i;
25 
26   PetscFunctionBegin;
27   for (i = 0; i < n; i++) {
28 #if defined(PETSC_USE_COMPLEX)
29     if (PetscImaginaryPart(xv[i]) > 0.0) {
30       PetscCall(PetscViewerASCIIPrintf(viewer, "    %g + %g i\n", (double)PetscRealPart(xv[i]), (double)PetscImaginaryPart(xv[i])));
31     } else if (PetscImaginaryPart(xv[i]) < 0.0) {
32       PetscCall(PetscViewerASCIIPrintf(viewer, "    %g - %g i\n", (double)PetscRealPart(xv[i]), -(double)PetscImaginaryPart(xv[i])));
33     } else PetscCall(PetscViewerASCIIPrintf(viewer, "    %g\n", (double)PetscRealPart(xv[i])));
34 #else
35     PetscCall(PetscViewerASCIIPrintf(viewer, "    %g\n", (double)xv[i]));
36 #endif
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 static PetscErrorCode VecView_Network_Seq(DM networkdm, Vec X, PetscViewer viewer) {
42   PetscInt           e, v, Start, End, offset, nvar, id;
43   const PetscScalar *xv;
44 
45   PetscFunctionBegin;
46   PetscCall(VecGetArrayRead(X, &xv));
47 
48   /* iterate over edges */
49   PetscCall(DMNetworkGetEdgeRange(networkdm, &Start, &End));
50   for (e = Start; e < End; e++) {
51     PetscCall(DMNetworkGetComponent(networkdm, e, ALL_COMPONENTS, NULL, NULL, &nvar));
52     if (!nvar) continue;
53 
54     PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &offset));
55     PetscCall(DMNetworkGetGlobalEdgeIndex(networkdm, e, &id));
56 
57     PetscCall(PetscViewerASCIIPrintf(viewer, "  Edge %" PetscInt_FMT ":\n", id));
58     PetscCall(VecArrayPrint_private(viewer, nvar, xv + offset));
59   }
60 
61   /* iterate over vertices */
62   PetscCall(DMNetworkGetVertexRange(networkdm, &Start, &End));
63   for (v = Start; v < End; v++) {
64     PetscCall(DMNetworkGetComponent(networkdm, v, ALL_COMPONENTS, NULL, NULL, &nvar));
65     if (!nvar) continue;
66 
67     PetscCall(DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &offset));
68     PetscCall(DMNetworkGetGlobalVertexIndex(networkdm, v, &id));
69 
70     PetscCall(PetscViewerASCIIPrintf(viewer, "  Vertex %" PetscInt_FMT ":\n", id));
71     PetscCall(VecArrayPrint_private(viewer, nvar, xv + offset));
72   }
73   PetscCall(PetscViewerFlush(viewer));
74   PetscCall(VecRestoreArrayRead(X, &xv));
75   PetscFunctionReturn(0);
76 }
77 
/* ASCII view of a parallel network vector.  Rank 0 prints its own edge and
   vertex blocks directly; every other rank packs its data into a PetscScalar
   buffer and sends it to rank 0, which unpacks and prints each rank's data in
   order, so all output appears on a single stream, rank by rank. */
static PetscErrorCode VecView_Network_MPI(DM networkdm, Vec X, PetscViewer viewer) {
  PetscInt           i, e, v, eStart, eEnd, vStart, vEnd, offset, nvar, len_loc, len, k;
  const PetscScalar *xv;
  MPI_Comm           comm;
  PetscMPIInt        size, rank, tag = ((PetscObject)viewer)->tag;
  Vec                localX;
  PetscBool          ghostvtex;
  PetscScalar       *values;
  PetscInt           j, ne, nv, id;
  MPI_Status         status;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)networkdm, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCallMPI(MPI_Comm_rank(comm, &rank));

  /* scatter the global vector to a local one so each rank's values are
     addressable via the local offsets returned by DMNetworkGetLocalVecOffset */
  PetscCall(DMGetLocalVector(networkdm, &localX));
  PetscCall(DMGlobalToLocalBegin(networkdm, X, INSERT_VALUES, localX));
  PetscCall(DMGlobalToLocalEnd(networkdm, X, INSERT_VALUES, localX));
  PetscCall(VecGetArrayRead(localX, &xv));

  PetscCall(VecGetLocalSize(localX, &len_loc));

  /* worst-case buffer length: all local values plus an (id, nvar) pair per
     local point plus the two leading counters */
  PetscCall(DMNetworkGetEdgeRange(networkdm, &eStart, &eEnd));
  PetscCall(DMNetworkGetVertexRange(networkdm, &vStart, &vEnd));
  len_loc += 2 * (1 + eEnd - eStart + vEnd - vStart);

  /* values = [nedges, nvertices; id, nvar, xedge; ...; id, nvars, xvertex;...], to be sent to proc[0];
     len is the max over all ranks so rank 0 can reuse one buffer for every receive */
  PetscCallMPI(MPI_Allreduce(&len_loc, &len, 1, MPIU_INT, MPI_MAX, comm));
  PetscCall(PetscCalloc1(len, &values));

  if (rank == 0) { PetscCall(PetscViewerASCIIPrintf(viewer, "Process [%d]\n", rank)); }

  /* iterate over edges; rank 0 prints directly, other ranks pack into values[] */
  k = 2; /* values[0..1] hold the edge and vertex counts */
  for (e = eStart; e < eEnd; e++) {
    PetscCall(DMNetworkGetComponent(networkdm, e, ALL_COMPONENTS, NULL, NULL, &nvar));
    if (!nvar) continue;

    PetscCall(DMNetworkGetLocalVecOffset(networkdm, e, ALL_COMPONENTS, &offset));
    PetscCall(DMNetworkGetGlobalEdgeIndex(networkdm, e, &id));

    if (rank == 0) { /* print its own entries */
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Edge %" PetscInt_FMT ":\n", id));
      PetscCall(VecArrayPrint_private(viewer, nvar, xv + offset));
    } else {
      values[0] += 1; /* number of edges */
      values[k++] = id;
      values[k++] = nvar;
      for (i = offset; i < offset + nvar; i++) values[k++] = xv[i];
    }
  }

  /* iterate over vertices; ghost vertices are printed by their owning rank */
  for (v = vStart; v < vEnd; v++) {
    PetscCall(DMNetworkIsGhostVertex(networkdm, v, &ghostvtex));
    if (ghostvtex) continue;
    PetscCall(DMNetworkGetComponent(networkdm, v, ALL_COMPONENTS, NULL, NULL, &nvar));
    if (!nvar) continue;

    PetscCall(DMNetworkGetLocalVecOffset(networkdm, v, ALL_COMPONENTS, &offset));
    PetscCall(DMNetworkGetGlobalVertexIndex(networkdm, v, &id));

    if (rank == 0) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Vertex %" PetscInt_FMT ":\n", id));
      PetscCall(VecArrayPrint_private(viewer, nvar, xv + offset));
    } else {
      values[1] += 1; /* number of vertices */
      values[k++] = id;
      values[k++] = nvar;
      for (i = offset; i < offset + nvar; i++) values[k++] = xv[i];
    }
  }

  if (rank == 0) {
    /* proc[0] receives and prints messages, one rank at a time in rank order */
    for (j = 1; j < size; j++) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "Process [%" PetscInt_FMT "]\n", j));

      PetscCallMPI(MPI_Recv(values, (PetscMPIInt)len, MPIU_SCALAR, j, tag, comm, &status));

      /* counts and ids were stored as PetscScalar; decode via their magnitude */
      ne = (PetscInt)PetscAbsScalar(values[0]);
      nv = (PetscInt)PetscAbsScalar(values[1]);

      /* print received edges */
      k = 2;
      for (i = 0; i < ne; i++) {
        id   = (PetscInt)PetscAbsScalar(values[k++]);
        nvar = (PetscInt)PetscAbsScalar(values[k++]);
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Edge %" PetscInt_FMT ":\n", id));
        PetscCall(VecArrayPrint_private(viewer, nvar, values + k));
        k += nvar;
      }

      /* print received vertices */
      for (i = 0; i < nv; i++) {
        id   = (PetscInt)PetscAbsScalar(values[k++]);
        nvar = (PetscInt)PetscAbsScalar(values[k++]);
        PetscCall(PetscViewerASCIIPrintf(viewer, "  Vertex %" PetscInt_FMT ":\n", id));
        PetscCall(VecArrayPrint_private(viewer, nvar, values + k));
        k += nvar;
      }
    }
  } else {
    /* sends values to proc[0]; only the k entries actually packed are sent */
    PetscCallMPI(MPI_Send((void *)values, k, MPIU_SCALAR, 0, tag, comm));
  }

  PetscCall(PetscFree(values));
  PetscCall(VecRestoreArrayRead(localX, &xv));
  PetscCall(DMRestoreLocalVector(networkdm, &localX));
  PetscFunctionReturn(0);
}
191 
192 PETSC_EXTERN PetscErrorCode VecView_MPI(Vec, PetscViewer);
193 
194 PetscErrorCode VecView_Network(Vec v, PetscViewer viewer) {
195   DM        dm;
196   PetscBool isseq;
197   PetscBool iascii;
198 
199   PetscFunctionBegin;
200   PetscCall(VecGetDM(v, &dm));
201   PetscCheck(dm, PetscObjectComm((PetscObject)v), PETSC_ERR_ARG_WRONG, "Vector not generated from a DM");
202   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
203   PetscCall(PetscObjectTypeCompare((PetscObject)v, VECSEQ, &isseq));
204 
205   /* Use VecView_Network if the viewer is ASCII; use VecView_Seq/MPI for other viewer formats */
206   if (iascii) {
207     if (isseq) PetscCall(VecView_Network_Seq(dm, v, viewer));
208     else PetscCall(VecView_Network_MPI(dm, v, viewer));
209   } else {
210     if (isseq) PetscCall(VecView_Seq(v, viewer));
211     else PetscCall(VecView_MPI(v, viewer));
212   }
213   PetscFunctionReturn(0);
214 }
215 
/* Create a global vector for a DMNetwork.  The vector itself comes from the
   underlying plex; its VIEW operation is then overridden with the network-aware
   viewer, and its DM is reset from the plex back to dm so VecGetDM() returns
   the network. */
static PetscErrorCode DMCreateGlobalVector_Network(DM dm, Vec *vec) {
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscCall(DMCreateGlobalVector(network->plex, vec));
  PetscCall(VecSetOperation(*vec, VECOP_VIEW, (void (*)(void))VecView_Network));
  PetscCall(VecSetDM(*vec, dm));
  PetscFunctionReturn(0);
}
225 
/* Create a local vector for a DMNetwork: delegate to the underlying plex, then
   reset the vector's DM from the plex back to dm so VecGetDM() returns the
   network.  No custom VIEW operation is installed for local vectors. */
static PetscErrorCode DMCreateLocalVector_Network(DM dm, Vec *vec) {
  DM_Network *network = (DM_Network *)dm->data;

  PetscFunctionBegin;
  PetscCall(DMCreateLocalVector(network->plex, vec));
  PetscCall(VecSetDM(*vec, dm));
  PetscFunctionReturn(0);
}
234 
235 PetscErrorCode DMNetworkInitializeToDefault_NonShared(DM dm) {
236   DM_Network *network = (DM_Network *)dm->data;
237 
238   PetscFunctionBegin;
239   network->Je                 = NULL;
240   network->Jv                 = NULL;
241   network->Jvptr              = NULL;
242   network->userEdgeJacobian   = PETSC_FALSE;
243   network->userVertexJacobian = PETSC_FALSE;
244 
245   network->vertex.DofSection       = NULL;
246   network->vertex.GlobalDofSection = NULL;
247   network->vertex.mapping          = NULL;
248   network->vertex.sf               = NULL;
249 
250   network->edge.DofSection       = NULL;
251   network->edge.GlobalDofSection = NULL;
252   network->edge.mapping          = NULL;
253   network->edge.sf               = NULL;
254 
255   network->DataSection      = NULL;
256   network->DofSection       = NULL;
257   network->GlobalDofSection = NULL;
258   network->componentsetup   = PETSC_FALSE;
259 
260   network->plex = NULL;
261 
262   network->component  = NULL;
263   network->ncomponent = 0;
264 
265   network->header             = NULL;
266   network->cvalue             = NULL;
267   network->componentdataarray = NULL;
268 
269   network->max_comps_registered = DMNETWORK_MAX_COMP_REGISTERED_DEFAULT; /* return to default */
270   PetscFunctionReturn(0);
271 }
272 /* Default values for the parameters in DMNetwork */
273 PetscErrorCode DMNetworkInitializeToDefault(DM dm) {
274   DM_Network          *network     = (DM_Network *)dm->data;
275   DMNetworkCloneShared cloneshared = network->cloneshared;
276 
277   PetscFunctionBegin;
278   PetscCall(DMNetworkInitializeToDefault_NonShared(dm));
279   /* Default values for shared data */
280   cloneshared->refct            = 1;
281   cloneshared->NVertices        = 0;
282   cloneshared->NEdges           = 0;
283   cloneshared->nVertices        = 0;
284   cloneshared->nEdges           = 0;
285   cloneshared->nsubnet          = 0;
286   cloneshared->pStart           = -1;
287   cloneshared->pEnd             = -1;
288   cloneshared->vStart           = -1;
289   cloneshared->vEnd             = -1;
290   cloneshared->eStart           = -1;
291   cloneshared->eEnd             = -1;
292   cloneshared->vltog            = NULL;
293   cloneshared->distributecalled = PETSC_FALSE;
294 
295   cloneshared->subnet     = NULL;
296   cloneshared->subnetvtx  = NULL;
297   cloneshared->subnetedge = NULL;
298   cloneshared->svtx       = NULL;
299   cloneshared->nsvtx      = 0;
300   cloneshared->Nsvtx      = 0;
301   cloneshared->svertices  = NULL;
302   cloneshared->sedgelist  = NULL;
303   cloneshared->svtable    = NULL;
304   PetscFunctionReturn(0);
305 }
306 
307 PetscErrorCode DMInitialize_Network(DM dm) {
308   PetscFunctionBegin;
309   PetscCall(DMSetDimension(dm, 1));
310   dm->ops->view                    = DMView_Network;
311   dm->ops->setfromoptions          = DMSetFromOptions_Network;
312   dm->ops->clone                   = DMClone_Network;
313   dm->ops->setup                   = DMSetUp_Network;
314   dm->ops->createglobalvector      = DMCreateGlobalVector_Network;
315   dm->ops->createlocalvector       = DMCreateLocalVector_Network;
316   dm->ops->getlocaltoglobalmapping = NULL;
317   dm->ops->createfieldis           = NULL;
318   dm->ops->createcoordinatedm      = NULL;
319   dm->ops->getcoloring             = NULL;
320   dm->ops->creatematrix            = DMCreateMatrix_Network;
321   dm->ops->createinterpolation     = NULL;
322   dm->ops->createinjection         = NULL;
323   dm->ops->refine                  = NULL;
324   dm->ops->coarsen                 = NULL;
325   dm->ops->refinehierarchy         = NULL;
326   dm->ops->coarsenhierarchy        = NULL;
327   dm->ops->globaltolocalbegin      = DMGlobalToLocalBegin_Network;
328   dm->ops->globaltolocalend        = DMGlobalToLocalEnd_Network;
329   dm->ops->localtoglobalbegin      = DMLocalToGlobalBegin_Network;
330   dm->ops->localtoglobalend        = DMLocalToGlobalEnd_Network;
331   dm->ops->destroy                 = DMDestroy_Network;
332   dm->ops->createsubdm             = NULL;
333   dm->ops->locatepoints            = NULL;
334   PetscFunctionReturn(0);
335 }
/*
  Copies over the subnetid and index portions of the DMNetworkComponentHeader from the original dm to the newdm
*/
339 static PetscErrorCode DMNetworkCopyHeaderTopological(DM dm, DM newdm) {
340   DM_Network *network = (DM_Network *)dm->data, *newnetwork = (DM_Network *)newdm->data;
341   PetscInt    p, i, np, index, subnetid;
342 
343   PetscFunctionBegin;
344   np = network->cloneshared->pEnd - network->cloneshared->pStart;
345   PetscCall(PetscCalloc2(np, &newnetwork->header, np, &newnetwork->cvalue));
346   for (i = 0; i < np; i++) {
347     p = i + network->cloneshared->pStart;
348     PetscCall(DMNetworkGetSubnetID(dm, p, &subnetid));
349     PetscCall(DMNetworkGetIndex(dm, p, &index));
350     newnetwork->header[i].index        = index;
351     newnetwork->header[i].subnetid     = subnetid;
352     newnetwork->header[i].size         = NULL;
353     newnetwork->header[i].key          = NULL;
354     newnetwork->header[i].offset       = NULL;
355     newnetwork->header[i].nvar         = NULL;
356     newnetwork->header[i].offsetvarrel = NULL;
357     newnetwork->header[i].ndata        = 0;
358     newnetwork->header[i].maxcomps     = DMNETWORK_MAX_COMP_AT_POINT_DEFAULT;
359     newnetwork->header[i].hsize        = sizeof(struct _p_DMNetworkComponentHeader) / sizeof(sizeof(DMNetworkComponentGenericDataType));
360   }
361   PetscFunctionReturn(0);
362 }
363 
/* Clone a DMNetwork.  The topological data (network->cloneshared) is shared
   with the original via reference counting; all non-topological data is
   freshly initialized on the clone.
   NOTE(review): *newdm is dereferenced here without being allocated, so it is
   presumably a DM already created by the caller (DMClone()) -- confirm. */
PetscErrorCode DMClone_Network(DM dm, DM *newdm) {
  DM_Network *network = (DM_Network *)dm->data, *newnetwork = NULL;

  PetscFunctionBegin;
  network->cloneshared->refct++; /* the clone holds a reference to the shared topology */
  PetscCall(PetscNewLog(*newdm, &newnetwork));
  (*newdm)->data = newnetwork;
  PetscCall(DMNetworkInitializeToDefault_NonShared(*newdm));
  newnetwork->cloneshared = network->cloneshared; /* Share all data that can be cloneshared */
  PetscCall(DMClone(network->plex, &newnetwork->plex));
  PetscCall(DMNetworkCopyHeaderTopological(dm, *newdm));
  PetscCall(DMNetworkInitializeNonTopological(*newdm)); /* initialize all non-topological data to the state after DMNetworkLayoutSetUp has been called */
  PetscCall(PetscObjectChangeTypeName((PetscObject)*newdm, DMNETWORK));
  PetscCall(DMInitialize_Network(*newdm));
  PetscFunctionReturn(0);
}
380 
381 /*MC
382   DMNETWORK = "network" - A DM object that encapsulates an unstructured network. The implementation is based on the DM object
383                           DMPlex that manages unstructured grids. Distributed networks use a non-overlapping partitioning of
384                           the edges. In the local representation, Vecs contain all unknowns in the interior and shared boundary.
385                           This is specified by a PetscSection object. Ownership in the global representation is determined by
386                           ownership of the underlying DMPlex points. This is specified by another PetscSection object.
387 
388   Level: intermediate
389 
390 .seealso: `DMType`, `DMNetworkCreate()`, `DMCreate()`, `DMSetType()`
391 M*/
392 
/* Type constructor invoked by DMSetType(dm, DMNETWORK): allocates the
   DM_Network implementation data, initializes it to defaults, and installs
   the DMNetwork operations table. */
PETSC_EXTERN PetscErrorCode DMCreate_Network(DM dm) {
  DM_Network *network;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscNewLog(dm, &network));
  PetscCall(PetscNewLog(dm, &network->cloneshared)); /* topology shared (refcounted) across clones */
  dm->data = network;

  PetscCall(DMNetworkInitializeToDefault(dm));
  PetscCall(DMInitialize_Network(dm));
  PetscFunctionReturn(0);
}
406 
407 /*@
408   DMNetworkCreate - Creates a DMNetwork object, which encapsulates an unstructured network.
409 
410   Collective
411 
412   Input Parameter:
413 . comm - The communicator for the DMNetwork object
414 
415   Output Parameter:
416 . network  - The DMNetwork object
417 
418   Level: beginner
419 
420 @*/
PetscErrorCode DMNetworkCreate(MPI_Comm comm, DM *network) {
  PetscFunctionBegin;
  PetscValidPointer(network, 2);
  PetscCall(DMCreate(comm, network));       /* create a generic DM ... */
  PetscCall(DMSetType(*network, DMNETWORK)); /* ... then give it the network type, triggering DMCreate_Network */
  PetscFunctionReturn(0);
}
428