xref: /petsc/src/dm/impls/sliced/sliced.c (revision d5b43468fb8780a8feea140ccd6fa3e6a50411cc)
1 #include <petscdmsliced.h> /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs, nz, *i, *j; /* block size; nonzero count; row offsets (length bs+1); column indices (length nz) */
} DMSlicedBlockFills;

/* Private data of a DMSLICED:
   bs      - block size
   n       - number of owned (non-ghost) blocks on this process
   N       - presumably the global block count -- not referenced in this file, TODO confirm
   Nghosts - number of ghost blocks on this process
   ghosts  - global block indices of the ghosts (owned copy, freed in DMDestroy_Sliced)
   d_nz/o_nz/d_nnz/o_nnz - block preallocation counts as in MatMPIBAIJSetPreallocation();
                           the arrays are caller-owned pointers (see DMSlicedSetPreallocation)
   dfill/ofill - optional scalar fill patterns inside diagonal/off-diagonal blocks */
typedef struct {
  PetscInt            bs, n, N, Nghosts, *ghosts;
  PetscInt            d_nz, o_nz, *d_nnz, *o_nnz;
  DMSlicedBlockFills *dfill, *ofill;
} DM_Sliced;
15 
/* Create the Jacobian matrix for a DMSLICED.

   Block (BAIJ) preallocation is always set from the user-provided block counts; scalar
   (AIJ) preallocation is additionally derived from them when the matrix type supports it,
   honoring any DMSlicedSetBlockFills() patterns.  Finally a block local-to-global mapping
   is attached so MatSetValuesLocal()/blocked insertion works with ghost indices. */
PetscErrorCode DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscInt              *globals, *sd_nnz, *so_nnz, rstart, bs, i;
  ISLocalToGlobalMapping lmap;
  void (*aij)(void) = NULL;
  DM_Sliced *slice  = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, slice->n * bs, slice->n * bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSize(*J, bs));
  PetscCall(MatSetType(*J, dm->mattype));
  /* Both Seq and MPI variants are called; the one not matching the actual matrix type is ignored */
  PetscCall(MatSeqBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz));
  PetscCall(MatMPIBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  PetscCall(PetscObjectQueryFunction((PetscObject)*J, "MatMPIAIJSetPreallocation_C", &aij));
  if (!aij) PetscCall(PetscObjectQueryFunction((PetscObject)*J, "MatSeqAIJSetPreallocation_C", &aij));
  if (aij) {
    if (bs == 1) {
      /* Scalar blocks: the block counts already are per-scalar-row counts */
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
    } else if (!slice->d_nnz) {
      /* Uniform count per block row: scale by bs, treating every block as dense */
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, NULL));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, NULL, slice->o_nz * bs, NULL));
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz gets a zero-size request when o_nnz was not given, leaving it NULL (hence the guard below) */
      PetscCall(PetscMalloc2(slice->n * bs, &sd_nnz, (!!slice->o_nnz) * slice->n * bs, &so_nnz));
      for (i = 0; i < slice->n * bs; i++) {
        /* Scalar row i of block row i/bs, sub-row i%bs: the d_nnz[]-1 off-diagonal blocks of the
           diagonal portion contribute the ofill row width each, the diagonal block contributes the
           dfill row width; a missing fill pattern means a dense block (width bs) */
        sd_nnz[i] = (slice->d_nnz[i / bs] - 1) * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs) + (slice->dfill ? slice->dfill->i[i % bs + 1] - slice->dfill->i[i % bs] : bs);
        if (so_nnz) so_nnz[i] = slice->o_nnz[i / bs] * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs);
      }
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz, slice->o_nz * bs, so_nnz));
      PetscCall(PetscFree2(sd_nnz, so_nnz));
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscCall(PetscMalloc1(slice->n + slice->Nghosts, &globals));
  PetscCall(MatGetOwnershipRange(*J, &rstart, NULL));
  /* rstart is in scalar indices; rstart/bs is this process's first global block index */
  for (i = 0; i < slice->n; i++) globals[i] = rstart / bs + i;

  for (i = 0; i < slice->Nghosts; i++) globals[slice->n + i] = slice->ghosts[i];
  /* globals is handed off to the mapping (PETSC_OWN_POINTER): do not free it here */
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, bs, slice->n + slice->Nghosts, globals, PETSC_OWN_POINTER, &lmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, lmap, lmap));
  PetscCall(ISLocalToGlobalMappingDestroy(&lmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(0);
}
67 
68 /*@C
69     DMSlicedSetGhosts - Sets the global indices of other processes elements that will
70       be ghosts on this process
71 
72     Not Collective
73 
74     Input Parameters:
+    dm - the DM object
76 .    bs - block size
77 .    nlocal - number of local (owned, non-ghost) blocks
78 .    Nghosts - number of ghost blocks on this process
79 -    ghosts - global indices of each ghost block
80 
81     Level: advanced
82 
.seealso: `DMDestroy()`, `DMCreateGlobalVector()`
84 
85 @*/
86 PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[])
87 {
88   DM_Sliced *slice = (DM_Sliced *)dm->data;
89 
90   PetscFunctionBegin;
91   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
92   PetscCall(PetscFree(slice->ghosts));
93   PetscCall(PetscMalloc1(Nghosts, &slice->ghosts));
94   PetscCall(PetscArraycpy(slice->ghosts, ghosts, Nghosts));
95   slice->bs      = bs;
96   slice->n       = nlocal;
97   slice->Nghosts = Nghosts;
98   PetscFunctionReturn(0);
99 }
100 
101 /*@C
102     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
103 
104     Not Collective
105 
106     Input Parameters:
+    dm - the DM object
108 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
109            submatrix  (same for all local rows)
110 .    d_nnz - array containing the number of block nonzeros in the various block rows
111            of the in diagonal portion of the local (possibly different for each block
112            row) or NULL.
113 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
114            submatrix (same for all local rows).
115 -    o_nnz - array containing the number of nonzeros in the various block rows of the
116            off-diagonal portion of the local submatrix (possibly different for
117            each block row) or NULL.
118 
119     Notes:
120     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
121     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
122 
123     Level: advanced
124 
.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `MatMPIAIJSetPreallocation()`,
126          `MatMPIBAIJSetPreallocation()`, `DMSlicedGetMatrix()`, `DMSlicedSetBlockFills()`
127 
128 @*/
129 PetscErrorCode DMSlicedSetPreallocation(DM dm, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
130 {
131   DM_Sliced *slice = (DM_Sliced *)dm->data;
132 
133   PetscFunctionBegin;
134   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
135   slice->d_nz  = d_nz;
136   slice->d_nnz = (PetscInt *)d_nnz;
137   slice->o_nz  = o_nz;
138   slice->o_nnz = (PetscInt *)o_nnz;
139   PetscFunctionReturn(0);
140 }
141 
/* Convert a dense bs-by-bs fill pattern (nonzero entry => slot is filled) into the
   compact CSR form stored in a DMSlicedBlockFills.  Passing fill==NULL destroys any
   previous pattern and leaves *inf NULL, which DMCreateMatrix_Sliced treats as a
   dense block. */
static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt *fill, DMSlicedBlockFills **inf)
{
  PetscInt            i, j, nz, *fi, *fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscValidPointer(inf, 3);
  /* Release any previous pattern; PetscFree3 must match the PetscMalloc3 below and nulls *inf */
  if (*inf) PetscCall(PetscFree3(*inf, (*inf)->i, (*inf)->j));
  if (!fill) PetscFunctionReturn(0);
  /* First pass: count nonzeros so the CSR arrays can be sized exactly */
  for (i = 0, nz = 0; i < bs * bs; i++)
    if (fill[i]) nz++;
  /* One grouped allocation: the struct, bs+1 row offsets, and nz column indices */
  PetscCall(PetscMalloc3(1, &f, bs + 1, &fi, nz, &fj));
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  /* Second pass: fill row offsets and column indices */
  for (i = 0, nz = 0; i < bs; i++) {
    fi[i] = nz;
    for (j = 0; j < bs; j++)
      if (fill[i * bs + j]) fj[nz++] = j;
  }
  /* Closing offset: i == bs here, so this sets fi[bs] = total nonzero count */
  fi[i] = nz;
  *inf  = f;
  PetscFunctionReturn(0);
}
167 
168 /*@C
169     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
170     of the matrix returned by DMSlicedGetMatrix().
171 
172     Logically Collective on dm
173 
174     Input Parameters:
+   dm - the DM object
176 .   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
177 -   ofill - the fill pattern in the off-diagonal blocks
178 
179     Notes:
180     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
181     See DMDASetBlockFills() for example usage.
182 
183     Level: advanced
184 
.seealso: `DMSlicedGetMatrix()`, `DMDASetBlockFills()`
186 @*/
187 PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt *dfill, const PetscInt *ofill)
188 {
189   DM_Sliced *slice = (DM_Sliced *)dm->data;
190 
191   PetscFunctionBegin;
192   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
193   PetscCall(DMSlicedSetBlockFills_Private(slice->bs, dfill, &slice->dfill));
194   PetscCall(DMSlicedSetBlockFills_Private(slice->bs, ofill, &slice->ofill));
195   PetscFunctionReturn(0);
196 }
197 
198 static PetscErrorCode DMDestroy_Sliced(DM dm)
199 {
200   DM_Sliced *slice = (DM_Sliced *)dm->data;
201 
202   PetscFunctionBegin;
203   PetscCall(PetscFree(slice->ghosts));
204   if (slice->dfill) PetscCall(PetscFree3(slice->dfill, slice->dfill->i, slice->dfill->j));
205   if (slice->ofill) PetscCall(PetscFree3(slice->ofill, slice->ofill->i, slice->ofill->j));
206   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
207   PetscCall(PetscFree(slice));
208   PetscFunctionReturn(0);
209 }
210 
211 static PetscErrorCode DMCreateGlobalVector_Sliced(DM dm, Vec *gvec)
212 {
213   DM_Sliced *slice = (DM_Sliced *)dm->data;
214 
215   PetscFunctionBegin;
216   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
217   PetscValidPointer(gvec, 2);
218   *gvec = NULL;
219   PetscCall(VecCreateGhostBlock(PetscObjectComm((PetscObject)dm), slice->bs, slice->n * slice->bs, PETSC_DETERMINE, slice->Nghosts, slice->ghosts, gvec));
220   PetscCall(VecSetDM(*gvec, dm));
221   PetscFunctionReturn(0);
222 }
223 
224 static PetscErrorCode DMGlobalToLocalBegin_Sliced(DM da, Vec g, InsertMode mode, Vec l)
225 {
226   PetscBool flg;
227 
228   PetscFunctionBegin;
229   PetscCall(VecGhostIsLocalForm(g, l, &flg));
230   PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
231   PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
232   PetscCall(VecGhostUpdateBegin(g, mode, SCATTER_FORWARD));
233   PetscFunctionReturn(0);
234 }
235 
236 static PetscErrorCode DMGlobalToLocalEnd_Sliced(DM da, Vec g, InsertMode mode, Vec l)
237 {
238   PetscBool flg;
239 
240   PetscFunctionBegin;
241   PetscCall(VecGhostIsLocalForm(g, l, &flg));
242   PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
243   PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
244   PetscFunctionReturn(0);
245 }
246 
247 /*MC
248    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields
249 
250    See DMCreateSliced() for details.
251 
252   Level: intermediate
253 
254 .seealso: `DMType`, `DMCOMPOSITE`, `DMCreateSliced()`, `DMCreate()`
255 M*/
256 
257 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
258 {
259   DM_Sliced *slice;
260 
261   PetscFunctionBegin;
262   PetscCall(PetscNew(&slice));
263   p->data = slice;
264 
265   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
266   p->ops->creatematrix       = DMCreateMatrix_Sliced;
267   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
268   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
269   p->ops->destroy            = DMDestroy_Sliced;
270   PetscFunctionReturn(0);
271 }
272 
273 /*@C
274     DMSlicedCreate - Creates a DM object, used to manage data for a unstructured problem
275 
276     Collective
277 
278     Input Parameters:
279 +   comm - the processors that will share the global vector
280 .   bs - the block size
.   nlocal - number of local (owned, non-ghost) blocks on this process
.   Nghosts - number of ghost blocks needed on this process
.   ghosts - global block indices of all ghost blocks for this process
284 .   d_nnz - matrix preallocation information representing coupling within this process
285 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
286 
    Output Parameter:
.   dm - the newly created DM object
289 
290     Notes:
291         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
292         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
293         the ghost points.
294 
295         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
296 
297     Level: advanced
298 
.seealso: `DMDestroy()`, `DMCreateGlobalVector()`, `DMSetType()`, `DMSLICED`, `DMSlicedSetGhosts()`, `DMSlicedSetPreallocation()`, `VecGhostUpdateBegin()`, `VecGhostUpdateEnd()`,
300          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`
301 
302 @*/
PetscErrorCode DMSlicedCreate(MPI_Comm comm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[], const PetscInt d_nnz[], const PetscInt o_nnz[], DM *dm)
{
  PetscFunctionBegin;
  PetscValidPointer(dm, 8);
  /* Convenience constructor: DMCreate + DMSetType + ghost/preallocation setup in one call */
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMSLICED));
  PetscCall(DMSlicedSetGhosts(*dm, bs, nlocal, Nghosts, ghosts));
  /* Preallocation is optional: o_nnz is only consulted when d_nnz is provided */
  if (d_nnz) PetscCall(DMSlicedSetPreallocation(*dm, 0, d_nnz, 0, o_nnz));
  PetscFunctionReturn(0);
}
313