xref: /petsc/src/dm/impls/sliced/sliced.c (revision 5753113e6c2e3d3881d4bf9bd87b746e7987b172)
#include <petscdmsliced.h> /*I      "petscdmsliced.h" I*/
#include <petscmat.h>
#include <petsc/private/dmimpl.h>

/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs, nz, *i, *j;
} DMSlicedBlockFills;

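/* Illustration (not part of the original source): for bs = 2 and the fill pattern
     [1 1]
     [0 1]
   the CSR encoding built by DMSlicedSetBlockFills_Private() below is
     nz = 3, i = {0, 2, 3}, j = {0, 1, 1}
   so row r of the block couples to the columns j[i[r]] .. j[i[r+1]-1]. */
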
typedef struct {
  PetscInt            bs, n, N, Nghosts, *ghosts;
  PetscInt            d_nz, o_nz, *d_nnz, *o_nnz;
  DMSlicedBlockFills *dfill, *ofill;
} DM_Sliced;

static PetscErrorCode DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscInt              *globals, *sd_nnz, *so_nnz, rstart, bs, i;
  ISLocalToGlobalMapping lmap;
  PetscBool              aij   = PETSC_FALSE;
  DM_Sliced             *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, slice->n * bs, slice->n * bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSize(*J, bs));
  PetscCall(MatSetType(*J, dm->mattype));
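  /* Both the Seq and MPI preallocation variants are called below; whichever does not match the
     actual matrix type is a no-op. */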
  PetscCall(MatSeqBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz));
  PetscCall(MatMPIBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices, so we check
   * whether it will do any good before going on with it. */
  PetscCall(PetscObjectHasFunction((PetscObject)*J, "MatMPIAIJSetPreallocation_C", &aij));
  if (!aij) PetscCall(PetscObjectHasFunction((PetscObject)*J, "MatSeqAIJSetPreallocation_C", &aij));
  if (aij) {
    if (bs == 1) {
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
    } else if (!slice->d_nnz) {
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, NULL));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, NULL, slice->o_nz * bs, NULL));
    } else {
      /* The user has provided preallocation per block-row; convert it to per scalar-row, respecting DMSlicedSetBlockFills() if applicable */
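      /* For scalar row i (block row i/bs, component i%bs): the diagonal block contributes the
         length of its dfill row, while the remaining d_nnz-1 blocks of the diagonal portion and
         all o_nnz off-process blocks contribute the length of their ofill rows (a dense row of
         bs entries when no fill pattern has been set). */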
      PetscCall(PetscMalloc2(slice->n * bs, &sd_nnz, (!!slice->o_nnz) * slice->n * bs, &so_nnz));
      for (i = 0; i < slice->n * bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i / bs] - 1) * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs) + (slice->dfill ? slice->dfill->i[i % bs + 1] - slice->dfill->i[i % bs] : bs);
        if (so_nnz) so_nnz[i] = slice->o_nnz[i / bs] * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs);
      }
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz, slice->o_nz * bs, so_nnz));
      PetscCall(PetscFree2(sd_nnz, so_nnz));
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscCall(PetscMalloc1(slice->n + slice->Nghosts, &globals));
  PetscCall(MatGetOwnershipRange(*J, &rstart, NULL));
  for (i = 0; i < slice->n; i++) globals[i] = rstart / bs + i;

  for (i = 0; i < slice->Nghosts; i++) globals[slice->n + i] = slice->ghosts[i];
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, bs, slice->n + slice->Nghosts, globals, PETSC_OWN_POINTER, &lmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, lmap, lmap));
  PetscCall(ISLocalToGlobalMappingDestroy(&lmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
  be ghosts on this process

  Not Collective

  Input Parameters:
+ dm      - the `DMSLICED` object
. bs      - block size
. nlocal  - number of local (owned, non-ghost) blocks
. Nghosts - number of ghost blocks on this process
- ghosts  - global indices of each ghost block

  Level: advanced

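  Example Usage:
  A minimal sketch; the sizes and indices are illustrative, not from the original source:
.vb
  PetscInt ghosts[] = {7, 8}; /* global block indices owned by other ranks */
  PetscCall(DMSlicedSetGhosts(dm, 2, 5, 2, ghosts)); /* bs = 2, 5 owned blocks, 2 ghost blocks */
.ve
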
.seealso: `DM`, `DMSLICED`, `DMDestroy()`, `DMCreateGlobalVector()`
@*/
PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[])
{
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(PetscFree(slice->ghosts));
  PetscCall(PetscMalloc1(Nghosts, &slice->ghosts));
  PetscCall(PetscArraycpy(slice->ghosts, ghosts, Nghosts));
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by `DMSLICED`

  Not Collective

  Input Parameters:
+ dm    - the `DM` object
. d_nz  - number of block nonzeros per block row in the diagonal portion of the local
          submatrix (same for all local rows)
. d_nnz - array containing the number of block nonzeros in the various block rows
          of the diagonal portion of the local submatrix (possibly different for each block
          row) or `NULL`
. o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
          submatrix (same for all local rows)
- o_nnz - array containing the number of block nonzeros in the various block rows of the
          off-diagonal portion of the local submatrix (possibly different for
          each block row) or `NULL`

  Level: advanced

  Note:
  See `MatMPIBAIJSetPreallocation()` for more details on preallocation.  If a scalar matrix (`MATAIJ`) is
  obtained with `DMCreateMatrix()`, the correct preallocation will be set, respecting `DMSlicedSetBlockFills()`.

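  Example Usage:
  A minimal sketch; the counts are illustrative, not from the original source:
.vb
  /* at most 3 block nonzeros per block row in the diagonal portion of the local submatrix,
     at most 1 in the off-diagonal portion, the same for every block row */
  PetscCall(DMSlicedSetPreallocation(dm, 3, NULL, 1, NULL));
.ve
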
.seealso: `DM`, `DMSLICED`, `DMDestroy()`, `DMCreateGlobalVector()`, `MatMPIAIJSetPreallocation()`,
          `MatMPIBAIJSetPreallocation()`, `DMCreateMatrix()`, `DMSlicedSetBlockFills()`
@*/
PetscErrorCode DMSlicedSetPreallocation(DM dm, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
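  /* The nnz arrays are stored by reference rather than copied, so the caller must keep them
     valid until the matrix is actually created. */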
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt *)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt *)o_nnz;
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt *fill, DMSlicedBlockFills **inf)
{
  PetscInt            i, j, nz, *fi, *fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscAssertPointer(inf, 3);
  if (*inf) PetscCall(PetscFree3(*inf, (*inf)->i, (*inf)->j));
  if (!fill) PetscFunctionReturn(PETSC_SUCCESS);
  for (i = 0, nz = 0; i < bs * bs; i++)
    if (fill[i]) nz++;
  PetscCall(PetscMalloc3(1, &f, bs + 1, &fi, nz, &fj));
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  for (i = 0, nz = 0; i < bs; i++) {
    fi[i] = nz;
    for (j = 0; j < bs; j++)
      if (fill[i * bs + j]) fj[nz++] = j;
  }
  fi[i] = nz;
  *inf  = f;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
  of the matrix returned by `DMCreateMatrix()`.

  Logically Collective

  Input Parameters:
+ dm    - the `DM` object
. dfill - the fill pattern in the diagonal block (may be `NULL`, means use dense block)
- ofill - the fill pattern in the off-diagonal blocks

  Level: advanced

  Note:
  This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
  See `DMDASetBlockFills()` for example usage.

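  Example Usage:
  A minimal sketch for bs = 2; the patterns are illustrative, not from the original source, and
  `DMSlicedSetGhosts()` must already have been called so the block size is known:
.vb
  const PetscInt dfill[4] = {1, 1,
                             0, 1}; /* diagonal blocks couple (0,0), (0,1), (1,1) */
  const PetscInt ofill[4] = {1, 0,
                             0, 1}; /* off-diagonal blocks couple only like components */
  PetscCall(DMSlicedSetBlockFills(dm, dfill, ofill));
.ve
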
.seealso: `DM`, `DMSLICED`, `DMCreateMatrix()`, `DMDASetBlockFills()`
@*/
PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt dfill[], const PetscInt ofill[])
{
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscCall(DMSlicedSetBlockFills_Private(slice->bs, dfill, &slice->dfill));
  PetscCall(DMSlicedSetBlockFills_Private(slice->bs, ofill, &slice->ofill));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMDestroy_Sliced(DM dm)
{
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscCall(PetscFree(slice->ghosts));
  if (slice->dfill) PetscCall(PetscFree3(slice->dfill, slice->dfill->i, slice->dfill->j));
  if (slice->ofill) PetscCall(PetscFree3(slice->ofill, slice->ofill->i, slice->ofill->j));
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  PetscCall(PetscFree(slice));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMCreateGlobalVector_Sliced(DM dm, Vec *gvec)
{
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscAssertPointer(gvec, 2);
  *gvec = NULL;
  PetscCall(VecCreateGhostBlock(PetscObjectComm((PetscObject)dm), slice->bs, slice->n * slice->bs, PETSC_DETERMINE, slice->Nghosts, slice->ghosts, gvec));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalBegin_Sliced(DM da, Vec g, InsertMode mode, Vec l)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g, l, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
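  /* Note the ordering: the End call first completes any ghost update still in flight before a
     new forward scatter is started. */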
  PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateBegin(g, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode DMGlobalToLocalEnd_Sliced(DM da, Vec g, InsertMode mode, Vec l)
{
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g, l, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
  PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   DMSLICED = "sliced" - A `DM` object that is used to manage data for a general graph. Uses `VecCreateGhost()` ghosted vectors for storing the fields

   See `DMSlicedCreate()` for details.

  Level: intermediate

.seealso: `DMType`, `DMCOMPOSITE`, `DMSlicedCreate()`, `DMCreate()`
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
{
  DM_Sliced *slice;

  PetscFunctionBegin;
  PetscCall(PetscNew(&slice));
  p->data = slice;

  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->creatematrix       = DMCreateMatrix_Sliced;
  p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
  p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  DMSlicedCreate - Creates a `DM` object, used to manage data for an unstructured problem

  Collective

  Input Parameters:
+ comm    - the processors that will share the global vector
. bs      - the block size
. nlocal  - number of local (owned, non-ghost) blocks on this process
. Nghosts - number of ghost blocks needed on this process
. ghosts  - global indices of all ghost blocks for this process
. d_nnz   - matrix preallocation information representing coupling within this process
- o_nnz   - matrix preallocation information representing coupling between this process and other processes

  Output Parameter:
. dm - the slice object

  Level: advanced

  Notes:
  This `DM` does not support `DMCreateLocalVector()`; instead one directly uses
  `VecGhostGetLocalForm()` and `VecGhostRestoreLocalForm()` to access the local representation, and `VecGhostUpdateBegin()` and `VecGhostUpdateEnd()` to update
  the ghost points.

  One can use `DMGlobalToLocalBegin()` and `DMGlobalToLocalEnd()` instead of `VecGhostUpdateBegin()` and `VecGhostUpdateEnd()`.

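  Example Usage:
  A minimal sketch; the sizes and indices are illustrative, not from the original source:
.vb
  DM       dm;
  Vec      X;
  PetscInt ghosts[] = {12, 13}; /* global block indices owned by other ranks */
  PetscCall(DMSlicedCreate(PETSC_COMM_WORLD, 1, 10, 2, ghosts, NULL, NULL, &dm));
  PetscCall(DMCreateGlobalVector(dm, &X));
  /* ... set values in X, then refresh the ghost entries ... */
  PetscCall(VecGhostUpdateBegin(X, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateEnd(X, INSERT_VALUES, SCATTER_FORWARD));
.ve
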
.seealso: `DM`, `DMSLICED`, `DMDestroy()`, `DMCreateGlobalVector()`, `DMSetType()`, `DMSlicedSetGhosts()`, `DMSlicedSetPreallocation()`,
          `VecGhostUpdateBegin()`, `VecGhostUpdateEnd()`,
          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`
@*/
PetscErrorCode DMSlicedCreate(MPI_Comm comm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[], const PetscInt d_nnz[], const PetscInt o_nnz[], DM *dm)
{
  PetscFunctionBegin;
  PetscAssertPointer(dm, 8);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMSLICED));
  PetscCall(DMSlicedSetGhosts(*dm, bs, nlocal, Nghosts, ghosts));
  if (d_nnz) PetscCall(DMSlicedSetPreallocation(*dm, 0, d_nnz, 0, o_nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}