#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h> /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank; /* owner */
  PetscInt    N;    /* total number of dofs */
  PetscInt    n;    /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

static PetscErrorCode DMCreateMatrix_Redundant(DM dm, Mat *J)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;
  PetscInt               i, rstart, rend, *cols;
  PetscScalar           *vals;

  PetscFunctionBegin;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, red->n, red->n, red->N, red->N));
  PetscCall(MatSetType(*J, dm->mattype));
  /* All rows live on the owning rank and are fully dense; only the preallocation call matching the matrix type takes effect */
  PetscCall(MatSeqAIJSetPreallocation(*J, red->n, NULL));
  PetscCall(MatSeqBAIJSetPreallocation(*J, 1, red->n, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*J, red->n, NULL, red->N - red->n, NULL));
  PetscCall(MatMPIBAIJSetPreallocation(*J, 1, red->n, NULL, red->N - red->n, NULL));

  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(MatSetLocalToGlobalMapping(*J, ltog, ltog));
  PetscCall(MatSetDM(*J, dm));

  /* Insert explicit zeros so the dense coupling pattern is assembled up front */
  PetscCall(PetscMalloc2(red->N, &cols, red->N, &vals));
  for (i = 0; i < red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  PetscCall(MatGetOwnershipRange(*J, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValues(*J, 1, &i, red->N, cols, vals, INSERT_VALUES));
  PetscCall(PetscFree2(cols, vals));
  PetscCall(MatAssemblyBegin(*J, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*J, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", NULL));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(dm->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm, Vec *gvec)
{
  DM_Redundant          *red = (DM_Redundant *)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(gvec, 2);
  *gvec = NULL;
  PetscCall(VecCreate(PetscObjectComm((PetscObject)dm), gvec));
  PetscCall(VecSetSizes(*gvec, red->n, red->N));
  PetscCall(VecSetType(*gvec, dm->vectype));
  PetscCall(DMGetLocalToGlobalMapping(dm, &ltog));
  PetscCall(VecSetLocalToGlobalMapping(*gvec, ltog));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm, Vec *lvec)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(lvec, 2);
  *lvec = NULL;
  PetscCall(VecCreate(PETSC_COMM_SELF, lvec));
  PetscCall(VecSetSizes(*lvec, red->N, red->N));
  PetscCall(VecSetType(*lvec, dm->vectype));
  PetscCall(VecSetDM(*lvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

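/* Local-to-global: with ADD_VALUES or MAX_VALUES the replicated local copies are reduced onto
   the owning rank with MPI_Reduce (the owner combines its own local values in place); with
   INSERT_VALUES the owner copies its local values into the global vector and no communication
   is needed. */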
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt        rank, iN;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &rank));
  PetscCall(VecGetArrayRead(l, &lv));
  PetscCall(VecGetArray(g, &gv));
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES: {
    void        *source;
    PetscScalar *buffer;
    PetscInt     i;
    if (rank == red->rank) {
      buffer = gv;
      source = MPI_IN_PLACE;
      if (imode == ADD_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES)
        for (i = 0; i < red->N; i++) buffer[i] = PetscMax(gv[i], lv[i]);
#endif
    } else source = (void *)lv;
    PetscCall(PetscMPIIntCast(red->N, &iN));
    PetscCallMPI(MPI_Reduce(source, gv, iN, MPIU_SCALAR, (imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX, red->rank, PetscObjectComm((PetscObject)dm)));
  } break;
  case INSERT_VALUES:
    PetscCall(PetscArraycpy(gv, lv, red->n));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(l, &lv));
  PetscCall(VecRestoreArray(g, &gv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm, Vec l, InsertMode imode, Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

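/* Global-to-local: the owning rank copies its global values into the local vector, then
   broadcasts them with MPI_Bcast so every rank ends up with a complete local copy. */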
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  DM_Redundant      *red = (DM_Redundant *)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;
  PetscMPIInt        iN;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(g, &gv));
  PetscCall(VecGetArray(l, &lv));
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) PetscCall(PetscArraycpy(lv, gv, red->n));
    PetscCall(PetscMPIIntCast(red->N, &iN));
    PetscCallMPI(MPI_Bcast(lv, iN, MPIU_SCALAR, red->rank, PetscObjectComm((PetscObject)dm)));
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "InsertMode not supported");
  }
  PetscCall(VecRestoreArrayRead(g, &gv));
  PetscCall(VecRestoreArray(l, &lv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm, Vec g, InsertMode imode, Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMView_Redundant(DM dm, PetscViewer viewer)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscBool     isascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "redundant: rank=%d N=%" PetscInt_FMT "\n", red->rank, red->N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateColoring_Redundant(DM dm, ISColoringType ctype, ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant *)dm->data;
  PetscInt         i, nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default:
    SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Unknown ISColoringType %d", (int)ctype);
  }
  /* Every dof is coupled to every other, so each column gets its own color */
  PetscCall(PetscMalloc1(nloc, &colors));
  for (i = 0; i < nloc; i++) PetscCall(ISColoringValueCast(i, colors + i));
  PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)dm), red->N, nloc, colors, PETSC_OWN_POINTER, coloring));
  PetscCall(ISColoringSetType(*coloring, ctype));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRefine_Redundant(DM dmc, MPI_Comm comm, DM *dmf)
{
  PetscMPIInt   flag;
  DM_Redundant *redc = (DM_Redundant *)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmc, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmc), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redc->rank, redc->N, dmf));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf, MPI_Comm comm, DM *dmc)
{
  PetscMPIInt   flag;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) PetscCall(PetscObjectGetComm((PetscObject)dmf, &comm));
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmf), comm, &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCall(DMRedundantCreate(comm, redf->rank, redf->N, dmc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc, DM dmf, Mat *P, Vec *scale)
{
  DM_Redundant *redc = (DM_Redundant *)dmc->data;
  DM_Redundant *redf = (DM_Redundant *)dmf->data;
  PetscMPIInt   flag;
  PetscInt      i, rstart, rend;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PetscObjectComm((PetscObject)dmc), PetscObjectComm((PetscObject)dmf), &flag));
  PetscCheck(flag == MPI_CONGRUENT || flag == MPI_IDENT, PetscObjectComm((PetscObject)dmf), PETSC_ERR_SUP, "cannot change communicators");
  PetscCheck(redc->rank == redf->rank, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Owning rank does not match");
  PetscCheck(redc->N == redf->N, PetscObjectComm((PetscObject)dmf), PETSC_ERR_ARG_INCOMP, "Global size does not match");
  /* Coarse and fine spaces are identical, so the interpolation is the identity matrix */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dmc), P));
  PetscCall(MatSetSizes(*P, redc->n, redc->n, redc->N, redc->N));
  PetscCall(MatSetType(*P, MATAIJ));
  PetscCall(MatSeqAIJSetPreallocation(*P, 1, NULL));
  PetscCall(MatMPIAIJSetPreallocation(*P, 1, NULL, 0, NULL));
  PetscCall(MatGetOwnershipRange(*P, &rstart, &rend));
  for (i = rstart; i < rend; i++) PetscCall(MatSetValue(*P, i, i, 1.0, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(*P, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*P, MAT_FINAL_ASSEMBLY));
  if (scale) PetscCall(DMCreateInterpolationScale(dmc, dmf, *P, scale));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantSetSize - Sets the size of a densely coupled redundant object

  Collective

  Input Parameters:
+ dm   - `DM` object of type `DMREDUNDANT`
. rank - rank of process to own the redundant degrees of freedom
- N    - total number of redundant degrees of freedom

  Level: advanced

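  Example Usage:
  A minimal sketch, assuming an MPI communicator `comm`; rank 0 is chosen as the owner:
.vb
  DM dm;
  PetscCall(DMCreate(comm, &dm));
  PetscCall(DMSetType(dm, DMREDUNDANT));
  PetscCall(DMRedundantSetSize(dm, 0, 4)); /* rank 0 owns all 4 redundant dofs */
  PetscCall(DMSetUp(dm));
.ve
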
.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantSetSize(DM dm, PetscMPIInt rank, PetscInt N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscValidLogicalCollectiveMPIInt(dm, rank, 2);
  PetscValidLogicalCollectiveInt(dm, N, 3);
  PetscTryMethod(dm, "DMRedundantSetSize_C", (DM, PetscMPIInt, PetscInt), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantGetSize - Gets the size of a densely coupled redundant object

  Not Collective

  Input Parameter:
. dm - `DM` object of type `DMREDUNDANT`

  Output Parameters:
+ rank - rank of process to own the redundant degrees of freedom (or `NULL`)
- N    - total number of redundant degrees of freedom (or `NULL`)

  Level: advanced

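  Example Usage:
  A minimal sketch; either output may be passed as `NULL` when it is not needed:
.vb
  PetscMPIInt owner;
  PetscInt    N;
  PetscCall(DMRedundantGetSize(dm, &owner, &N));
  PetscCall(DMRedundantGetSize(dm, NULL, &N)); /* query only the global size */
.ve
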
.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMRedundantCreate()`, `DMRedundantSetSize()`
@*/
PetscErrorCode DMRedundantGetSize(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidType(dm, 1);
  PetscUseMethod(dm, "DMRedundantGetSize_C", (DM, PetscMPIInt *, PetscInt *), (dm, rank, N));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm, PetscMPIInt rank, PetscInt N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;
  PetscMPIInt   myrank;
  PetscInt      i, *globals;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)dm), &myrank));
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* The local-to-global mapping is set up here: on every rank, local index i maps to global index i */
  PetscCall(PetscMalloc1(red->N, &globals));
  for (i = 0; i < red->N; i++) globals[i] = i;
  PetscCall(ISLocalToGlobalMappingDestroy(&dm->ltogmap));
  PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm), 1, red->N, globals, PETSC_OWN_POINTER, &dm->ltogmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMRedundantGetSize_Redundant(DM dm, PetscMPIInt *rank, PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant *)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N) *N = red->N;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
         no variables); in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
         process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

  Level: intermediate

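  Example Usage:
  A minimal sketch of packing a redundant parameter into a `DMCOMPOSITE`, assuming a communicator `comm` and some other `DM` named `dmother`:
.vb
  DM packer, dmred;
  PetscCall(DMCompositeCreate(comm, &packer));
  PetscCall(DMRedundantCreate(comm, 0, 1, &dmred)); /* one parameter, owned by rank 0 */
  PetscCall(DMCompositeAddDM(packer, dmred));
  PetscCall(DMCompositeAddDM(packer, dmother));
.ve
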
.seealso: `DMType`, `DMCOMPOSITE`, `DMCreate()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  DM_Redundant *red;

  PetscFunctionBegin;
  PetscCall(PetscNew(&red));
  dm->data = red;

  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantSetSize_C", DMRedundantSetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMRedundantGetSize_C", DMRedundantGetSize_Redundant));
  PetscCall(PetscObjectComposeFunction((PetscObject)dm, "DMSetUpGLVisViewer_C", DMSetUpGLVisViewer_Redundant));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMRedundantCreate - Creates a `DM` object, used to manage data for dense globally coupled variables

  Collective

  Input Parameters:
+ comm - the processors that will share the global vector
. rank - the MPI rank to own the redundant values
- N    - total number of degrees of freedom

  Output Parameter:
. dm - the `DM` object of type `DMREDUNDANT`

  Level: advanced

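  Example Usage:
  A minimal sketch; the global vector has local size `N` on `rank` and 0 elsewhere, while a local vector has size `N` on every rank:
.vb
  DM  dm;
  Vec g, l;
  PetscCall(DMRedundantCreate(PETSC_COMM_WORLD, 0, 2, &dm));
  PetscCall(DMCreateGlobalVector(dm, &g));
  PetscCall(DMCreateLocalVector(dm, &l));
  PetscCall(DMGlobalToLocalBegin(dm, g, INSERT_VALUES, l)); /* broadcast the owner's values */
  PetscCall(DMGlobalToLocalEnd(dm, g, INSERT_VALUES, l));
  PetscCall(VecDestroy(&l));
  PetscCall(VecDestroy(&g));
  PetscCall(DMDestroy(&dm));
.ve
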
.seealso: `DM`, `DMREDUNDANT`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMCreateMatrix()`, `DMCompositeAddDM()`, `DMSetType()`, `DMRedundantSetSize()`, `DMRedundantGetSize()`
@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm, PetscMPIInt rank, PetscInt N, DM *dm)
{
  PetscFunctionBegin;
  PetscAssertPointer(dm, 4);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMREDUNDANT));
  PetscCall(DMRedundantSetSize(*dm, rank, N));
  PetscCall(DMSetUp(*dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}