static char help[] = "This example tests subnetwork coupling. \n\
\n\n";

#include <petscdmnetwork.h>
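
/*
  Overview:
    1. Register two component types, Comp0 and Comp1.
    2. Build the subnetworks from edge lists and couple them through a shared
       vertex: vertex 0 of subnet[0] is merged with vertex 0 of every other
       subnetwork.
    3. Attach components and dofs, set up and (optionally) distribute the
       network, then write recognizable values into a global vector at the
       shared vertex to check the coupling.
*/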

/* Component attached to vertices of subnet[0] and to shared vertices */
typedef struct {
  PetscInt id;
} Comp0;

/* Component attached to vertices of the remaining subnets and to shared vertices */
typedef struct {
  PetscScalar val;
} Comp1;

int main(int argc, char **argv)
{
  PetscMPIInt     size, rank;
  DM              dmnetwork;
  PetscInt        i, j, net, Nsubnet, nsubnet, ne, nv, nvar, v, ncomp, compkey0, compkey1, compkey, goffset, row;
  PetscInt        numEdges[10], *edgelist[10], asvtx, bsvtx;
  const PetscInt *vtx, *edges;
  PetscBool       sharedv, ghost, distribute = PETSC_TRUE, test = PETSC_FALSE;
  Vec             X;
  Comp0           comp0;
  Comp1           comp1;
  PetscScalar     val;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, NULL, help));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));

  /* Create a network of subnetworks: each process contributes nsubnet local subnetworks
     (2 on a single process, 1 per process otherwise) */
  nsubnet = 1;
  if (size == 1) nsubnet = 2;

  /* Create a dmnetwork and register components */
  PetscCall(DMNetworkCreate(PETSC_COMM_WORLD, &dmnetwork));
  PetscCall(DMNetworkRegisterComponent(dmnetwork, "comp0", sizeof(Comp0), &compkey0));
  PetscCall(DMNetworkRegisterComponent(dmnetwork, "comp1", sizeof(Comp1), &compkey1));

  /* Set component values -- intentionally rank-dependent for testing */
  comp0.id  = rank;
  comp1.val = 10.0 * rank;

  /* Set number of subnetworks, numbers of vertices and edges over each subnetwork */
  PetscCall(DMNetworkSetNumSubNetworks(dmnetwork, nsubnet, PETSC_DECIDE));
  PetscCall(DMNetworkGetNumSubNetworks(dmnetwork, NULL, &Nsubnet));

  /* Input subnetworks; when size > 1, process[i] creates subnetwork[i] */
  for (i = 0; i < Nsubnet; i++) numEdges[i] = 0;
  for (i = 0; i < Nsubnet; i++) {
    if (i == 0 && (size == 1 || (rank == i && size > 1))) {
      numEdges[i] = 3;
      PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
      edgelist[i][0] = 0;
      edgelist[i][1] = 2;
      edgelist[i][2] = 2;
      edgelist[i][3] = 1;
      edgelist[i][4] = 1;
      edgelist[i][5] = 3;

    } else if (i == 1 && (size == 1 || (rank == i && size > 1))) {
      numEdges[i] = 3;
      PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
      edgelist[i][0] = 0;
      edgelist[i][1] = 3;
      edgelist[i][2] = 3;
      edgelist[i][3] = 2;
      edgelist[i][4] = 2;
      edgelist[i][5] = 1;

    } else if (i > 1 && (size == 1 || (rank == i && size > 1))) {
      numEdges[i] = 3;
      PetscCall(PetscMalloc1(2 * numEdges[i], &edgelist[i]));
      for (j = 0; j < numEdges[i]; j++) {
        edgelist[i][2 * j]     = j;
        edgelist[i][2 * j + 1] = j + 1;
      }
    }
  }
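
  /*
    The edge lists above describe simple paths (vertex numbering is local to
    each subnetwork):
      subnet[0]   : 0 -- 2 -- 1 -- 3
      subnet[1]   : 0 -- 3 -- 2 -- 1
      subnet[i>1] : 0 -- 1 -- 2 -- 3
  */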

  /* Add subnetworks */
  for (i = 0; i < Nsubnet; i++) {
    PetscInt netNum = -1;
    PetscCall(DMNetworkAddSubnetwork(dmnetwork, NULL, numEdges[i], edgelist[i], &netNum));
  }

  /* Add shared vertices -- in the current implementation, all processes must hold this information */
  asvtx = bsvtx = 0;
  for (j = 1; j < Nsubnet; j++) {
    /* vertex 0 of subnet[0] is shared with vertex 0 of subnet[j] */
    PetscCall(DMNetworkAddSharedVertices(dmnetwork, 0, j, 1, &asvtx, &bsvtx));
  }

  /* Set up the network layout */
  PetscCall(DMNetworkLayoutSetUp(dmnetwork));

  /* Add components to the non-shared vertices: nvar = 1 on subnet[0], nvar = 2 on all other
     subnets; shared vertices are skipped here and handled separately below */
  for (net = 0; net < Nsubnet; net++) {
    PetscCall(DMNetworkGetSubnetwork(dmnetwork, net, &nv, &ne, &vtx, &edges));
    for (v = 0; v < nv; v++) {
      PetscCall(DMNetworkIsSharedVertex(dmnetwork, vtx[v], &sharedv));
      if (sharedv) continue;

      if (!net) {
        PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey0, &comp0, 1));
      } else {
        PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey1, &comp1, 2));
      }
    }
  }

  /* Add components and nvar to the shared vertices -- the owning rank and all ghost ranks must call DMNetworkAddComponent() */
  PetscCall(DMNetworkGetSharedVertices(dmnetwork, &nv, &vtx));
  for (v = 0; v < nv; v++) {
    PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey0, &comp0, 1));
    PetscCall(DMNetworkAddComponent(dmnetwork, vtx[v], compkey1, &comp1, 2));
  }
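
  /* Each shared vertex now carries one Comp0 (nvar = 1) and one Comp1 (nvar = 2),
     i.e. 3 dofs total in the global vector */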

  /* Enable runtime selection of the graph partitioner type -- must be called before DMSetUp() */
  if (size > 1) {
    DM               plexdm;
    PetscPartitioner part;
    PetscCall(DMNetworkGetPlex(dmnetwork, &plexdm));
    PetscCall(DMPlexGetPartitioner(plexdm, &part));
    PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
    PetscCall(PetscOptionsSetValue(NULL, "-dm_plex_csr_alg", "mat")); /* for parmetis */
  }

  /* Set up dmnetwork */
  PetscCall(DMSetUp(dmnetwork));

  /* Redistribute the network layout; use '-distribute false' to skip */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-distribute", &distribute, NULL));
  if (distribute) {
    PetscCall(DMNetworkDistribute(&dmnetwork, 0));
    PetscCall(DMView(dmnetwork, PETSC_VIEWER_STDOUT_WORLD));
  }

  /* Create a global vector */
  PetscCall(DMCreateGlobalVector(dmnetwork, &X));
  PetscCall(VecSet(X, 0.0));

  /* Set X values at the shared vertices */
  PetscCall(DMNetworkGetSharedVertices(dmnetwork, &nv, &vtx));
  for (v = 0; v < nv; v++) {
    PetscCall(DMNetworkIsGhostVertex(dmnetwork, vtx[v], &ghost));
    if (ghost) continue;

    /* exactly one process owns the non-ghost copy of each shared vertex */
    PetscCall(DMNetworkGetComponent(dmnetwork, vtx[v], ALL_COMPONENTS, NULL, NULL, &nvar));
    PetscCall(DMNetworkGetNumComponents(dmnetwork, vtx[v], &ncomp));
    /* PetscCall(PetscPrintf(PETSC_COMM_SELF,"[%d] shared v %" PetscInt_FMT ": nvar %" PetscInt_FMT ", ncomp %" PetscInt_FMT "\n",rank,vtx[v],nvar,ncomp)); */
    for (j = 0; j < ncomp; j++) {
      PetscCall(DMNetworkGetComponent(dmnetwork, vtx[v], j, &compkey, NULL, &nvar));
      PetscCall(DMNetworkGetGlobalVecOffset(dmnetwork, vtx[v], j, &goffset));
      for (i = 0; i < nvar; i++) {
        row = goffset + i;
        val = compkey + 1.0;
        PetscCall(VecSetValues(X, 1, &row, &val, INSERT_VALUES));
      }
    }
  }
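
  /* Component keys are assigned in registration order, so compkey0 = 0 and
     compkey1 = 1; the shared-vertex entries of X are thus expected to be 1.0
     for the Comp0 dof and 2.0 for the two Comp1 dofs */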
  PetscCall(VecAssemblyBegin(X));
  PetscCall(VecAssemblyEnd(X));
  PetscCall(VecView(X, PETSC_VIEWER_STDOUT_WORLD));

  /* Test DMNetworkGetSubnetwork(); run with -test_getsubnet [-subnet <n>] to enable */
  PetscCall(PetscOptionsGetBool(NULL, NULL, "-test_getsubnet", &test, NULL));
  if (test) {
    net = 0;
    PetscCall(PetscOptionsGetInt(NULL, NULL, "-subnet", &net, NULL));
    PetscCall(DMNetworkGetSubnetwork(dmnetwork, net, &nv, &ne, &vtx, &edges));
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] subnet %" PetscInt_FMT ": nv %" PetscInt_FMT ", ne %" PetscInt_FMT "\n", rank, net, nv, ne));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));

    for (i = 0; i < nv; i++) {
      PetscCall(DMNetworkIsGhostVertex(dmnetwork, vtx[i], &ghost));
      PetscCall(DMNetworkIsSharedVertex(dmnetwork, vtx[i], &sharedv));

      PetscCall(DMNetworkGetNumComponents(dmnetwork, vtx[i], &ncomp));
      if (sharedv || ghost) PetscCall(PetscPrintf(PETSC_COMM_SELF, " [%d] v %" PetscInt_FMT " is shared %d, is ghost %d, ncomp %" PetscInt_FMT "\n", rank, vtx[i], sharedv, ghost, ncomp));

      for (j = 0; j < ncomp; j++) {
        void *component;
        PetscCall(DMNetworkGetComponent(dmnetwork, vtx[i], j, &compkey, (void **)&component, NULL));
        if (compkey == 0) {
          PetscCall(PetscPrintf(PETSC_COMM_SELF, " [%d] v %" PetscInt_FMT " compkey %" PetscInt_FMT ", mycomp0->id %" PetscInt_FMT "\n", rank, vtx[i], compkey, ((Comp0 *)component)->id));
        } else if (compkey == 1) {
          PetscCall(PetscPrintf(PETSC_COMM_SELF, " [%d] v %" PetscInt_FMT " compkey %" PetscInt_FMT ", mycomp1->val %g\n", rank, vtx[i], compkey, (double)PetscRealPart(((Comp1 *)component)->val)));
        }
      }
    }
  }

  /* Free work space */
  PetscCall(VecDestroy(&X));
  for (i = 0; i < Nsubnet; i++) {
    if (size == 1 || rank == i) PetscCall(PetscFree(edgelist[i]));
  }

  PetscCall(DMDestroy(&dmnetwork));
  PetscCall(PetscFinalize());
  return 0;
}

/*TEST

   build:
      requires: !single double defined(PETSC_HAVE_ATTRIBUTEALIGNED)

   test:
      args:

   test:
      suffix: 2
      nsize: 2
      args: -options_left no

   test:
      suffix: 3
      nsize: 4
      args: -options_left no

TEST*/