/* xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision a4e35b1925eceef64945ea472b84f2bf06a67b5e) */
#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h> /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm, Mat *J)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i, rstart, rend, *cols;
  PetscScalar           *vals;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, red->n, red->n, red->N, red->N));
  PetscCall(MatSetType(*J, dm->mattype));
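  /* Rows are dense: the owner's rows contain all N columns. Preallocation is attempted for the common
     AIJ/BAIJ variants; the calls that do not match the matrix type set above are ignored. */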
  PetscCall(MatSeqAIJSetPreallocation(*J, red->n, NULL));
  PetscCall(MatSeqBAIJSetPreallocation(*J, 1, red->n, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*J, red->n, NULL, red->N - red->n, NULL));
  PetscCall(MatMPIBAIJSetPreallocation(*J, 1, red->n, NULL, red->N - red->n, NULL));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltog, ltog));
  PetscCall(MatSetDM(*J, dm));

  PetscCall(PetscMalloc2(red->N, &cols, red->N, &vals));
  for (i = 0; i < red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, red->N, cols, vals, INSERT_VALUES));
  PetscCall(PetscFree2(cols, vals));
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", NULL));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(dm->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm, Vec *gvec)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(gvec, 2);
  *gvec = NULL;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)dm), gvec));
  PetscCall(VecSetSizes(*gvec, red->n, red->N));
  PetscCall(VecSetType(*gvec, dm->vectype));
  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*gvec, ltog));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm, Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(lvec, 2);
  *lvec = NULL;
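  /* The local representation is a sequential vector holding all N redundant values on every rank */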
  PetscCall(VecCreate(PETSC_COMM_SELF, lvec));
  PetscCall(VecSetSizes(*lvec, red->N, red->N));
  PetscCall(VecSetType(*lvec, dm->vectype));
  PetscCall(VecSetDM(*lvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt        rank;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(VecGetArrayRead(l, &lv));
  PetscCall(VecGetArray(g, &gv));
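  /* ADD_VALUES/MAX_VALUES: contributions from all ranks are reduced onto the owner; the owner first folds its own
     local values into the global array and then reduces in place. INSERT_VALUES: only the owner copies (red->n is
     zero elsewhere, so the global vector has no entries on other ranks). */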
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES: {
    void        *source;
    PetscScalar *buffer;
    PetscInt     i;
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = PetscMax(gv[i], lv[i]);
#endif
    } else source = (void *)lv;
    PetscCallMPI(MPI_Reduce(source, gv, red->N, MPIU_SCALAR, (imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX, red->rank, PetscObjectComm((PetscObject)dm)));
  } break;
  case INSERT_VALUES:
    PetscCall(PetscArraycpy(gv, lv, red->n));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(l, &lv));
  PetscCall(VecRestoreArray(g, &gv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(g, &gv));
  PetscCall(VecGetArray(l, &lv));
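  /* The owner copies its global values into its local array, then broadcasts so that every rank ends up with all N values */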
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) PetscCall(PetscArraycpy(lv, gv, red->n));
    PetscCallMPI(MPI_Bcast(lv, red->N, MPIU_SCALAR, red->rank, PetscObjectComm((PetscObject)dm)));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(g, &gv));
  PetscCall(VecRestoreArray(l, &lv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMView_Redundant(DM dm, PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscBool     iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscViewerASCIIPrintf(viewer, "redundant: rank=%d N=%" PetscInt_FMT "\n", red->rank, red->N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm, ISColoringType ctype, ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant *)dm->data;
  PetscInt         i, nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
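  /* Every redundant dof is coupled to every other (the rows are dense), so each dof gets its own color */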
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Unknown ISColoringType %d", (int)ctype);
  }
  PetscCall(PetscMalloc1(nloc, &colors));
  for (i = 0; i < nloc; i++) colors[i] = i;
  PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)dm), red->N, nloc, colors, PETSC_OWN_POINTER, coloring));
  PetscCall(ISColoringSetType(*coloring, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRefine_Redundant(DM dmc, MPI_Comm comm, DM *dmf)
{
  PetscMPIInt   flag;
  DM_Redundant *redc = (DM_Redundant *)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmc, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redc->rank, redc->N, dmf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscMPIInt   flag;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmf), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redf->rank, redf->N, dmc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc, DM dmf, Mat *P, Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant *)dmc->data;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;
  PetscMPIInt   flag;
  PetscInt      i, rstart, rend;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), PetscObjectComm((PetscObject)dmf), &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCheck(redc->rank == redf->rank, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Owning rank does not match");
  PetscCheck(redc->N == redf->N, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Global size does not match");
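  /* Coarse and fine redundant spaces are identical, so the interpolation is the N x N identity, assembled as a diagonal AIJ matrix */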
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), P));
  PetscCall(MatSetSizes(*P, redc->n, redc->n, redc->N, redc->N));
  PetscCall(MatSetType(*P, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*P, 1, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*P, 1, NULL, 0, NULL));
  PetscCall(MatGetOwnershipRange(*P, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(*P, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  if (scale) PetscCall(DMCreateInterpolationScale(dmc, dmf, *P, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantSetSize - Sets the size of a densely coupled redundant object

  Collective

  Input Parameters:
+ dm   - `DM` object of type `DMREDUNDANT`
. rank - rank of the process that will own the redundant degrees of freedom
- N    - total number of redundant degrees of freedom

  Level: advanced

.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantSetSize(DM dm, PetscMPIInt rank, PetscInt N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscValidLogicalCollectiveMPIInt(dm, rank, 2);
  PetscValidLogicalCollectiveInt(dm, N, 3);
  PetscTryMethod(dm, "DMRedundantSetSize_C", (DM, PetscMPIInt, PetscInt), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantGetSize - Gets the size of a densely coupled redundant object

  Not Collective

  Input Parameter:
. dm - `DM` object of type `DMREDUNDANT`

  Output Parameters:
+ rank - rank of the process that owns the redundant degrees of freedom (or `NULL`)
- N    - total number of redundant degrees of freedom (or `NULL`)

  Level: advanced

.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantSetSize()`
@*/
PetscErrorCode DMRedundantGetSize(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscUseMethod(dm, "DMRedundantGetSize_C", (DM, PetscMPIInt *, PetscInt *), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm, PetscMPIInt rank, PetscInt N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscMPIInt   myrank;
  PetscInt      i, *globals;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &myrank));
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* Every rank's local space contains all N entries, so the local-to-global mapping is the identity on every rank */
  PetscCall(PetscMalloc1(red->N, &globals));
  for (i = 0; i < red->N; i++) globals[i] = i;
  PetscCall(ISLocalToGlobalMappingDestroy(&dm->ltogmap));
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, red->N, globals, PETSC_OWN_POINTER, &dm->ltogmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense, globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

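  Example Usage:
  A minimal sketch (illustrative only) of packing one redundant parameter alongside the rest of a problem with `DMCOMPOSITE`
.vb
  DM pack, dmred;

  PetscCall(DMCompositeCreate(PETSC_COMM_WORLD, &pack));
  PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 1, &dmred)); // one value, owned by rank 0
  PetscCall(DMCompositeAddDM(pack, dmred));                     // packed next to the problem's other DMs
  PetscCall(DMDestroy(&dmred));                                 // pack keeps its own reference
.ve
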
  Level: intermediate

.seealso: `DMType`, `DMCOMPOSITE`, `DMCreate()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", DMRedundantSetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", DMRedundantGetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", DMSetUpGLVisViewer_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  DMRedundantCreate - Creates a `DM` object, used to manage data for dense globally coupled variables

  Collective

  Input Parameters:
+ comm - communicator for the MPI processes that will share the global vector
. rank - the MPI rank that will own the redundant values
- N    - total number of degrees of freedom

  Output Parameter:
. dm - the `DM` object of type `DMREDUNDANT`

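  Example Usage:
  A minimal sketch (illustrative only); three redundant values are owned by rank 0 and replicated on every rank in the local representation
.vb
  DM  dm;
  Vec g, l;

  PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 3, &dm));
  PetscCall(DMCreateGlobalVector(dm, &g)); // local size 3 on rank 0, 0 elsewhere
  PetscCall(DMCreateLocalVector(dm, &l));  // sequential, size 3 on every rank
  PetscCall(DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l));
  PetscCall(DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&g));
  PetscCall(DMDestroy(&dm));
.ve
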
  Level: advanced

.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMCreateMatrix()`, `DMCompositeAddDM()`, `DMSetType()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm, PetscMPIInt rank, PetscInt N, DM *dm)
{
  PetscFunctionBegin;
  PetscAssertPointer(dm, 4);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMREDUNDANT));
  PetscCall(DMRedundantSetSize(*dm, rank, N));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}
408