xref: /petsc/src/dm/impls/sliced/sliced.c (revision 1511cd715a1f0c8d257549c5ebe5cee9c6feed4d)
1 #include <petscdmsliced.h> /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
5 /* CSR storage of the nonzero structure of a bs*bs matrix */
6 typedef struct {
7   PetscInt bs, nz, *i, *j;
8 } DMSlicedBlockFills;
9 
10 typedef struct {
11   PetscInt            bs, n, N, Nghosts, *ghosts;
12   PetscInt            d_nz, o_nz, *d_nnz, *o_nnz;
13   DMSlicedBlockFills *dfill, *ofill;
14 } DM_Sliced;
15 
/* Create the Jacobian matrix for a DMSLICED: block (BAIJ) preallocation is always
   attempted, and scalar (AIJ) preallocation is derived from the per-block-row
   counts when the matrix type supports it.  Also attaches a block
   local-to-global mapping covering owned blocks followed by ghost blocks. */
PetscErrorCode DMCreateMatrix_Sliced(DM dm, Mat *J) {
  PetscInt              *globals, *sd_nnz, *so_nnz, rstart, bs, i;
  ISLocalToGlobalMapping lmap;
  void (*aij)(void) = NULL;
  DM_Sliced *slice  = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm), J));
  PetscCall(MatSetSizes(*J, slice->n * bs, slice->n * bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSize(*J, bs));
  PetscCall(MatSetType(*J, dm->mattype));
  /* These are harmless no-ops when *J is not of the matching (Seq/MPI) BAIJ type */
  PetscCall(MatSeqBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz));
  PetscCall(MatMPIBAIJSetPreallocation(*J, bs, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  PetscCall(PetscObjectQueryFunction((PetscObject)*J, "MatMPIAIJSetPreallocation_C", &aij));
  if (!aij) PetscCall(PetscObjectQueryFunction((PetscObject)*J, "MatSeqAIJSetPreallocation_C", &aij));
  if (aij) {
    if (bs == 1) {
      /* Scalar problem: the block-row counts are already per-scalar-row counts */
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz, slice->d_nnz, slice->o_nz, slice->o_nnz));
    } else if (!slice->d_nnz) {
      /* Only a uniform per-block-row count was given: scale it by the block size */
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, NULL));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, NULL, slice->o_nz * bs, NULL));
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz is sized 0 (and stays unallocated) when no o_nnz array was supplied */
      PetscCall(PetscMalloc2(slice->n * bs, &sd_nnz, (!!slice->o_nnz) * slice->n * bs, &so_nnz));
      for (i = 0; i < slice->n * bs; i++) {
        /* Scalar row i = component i%bs of block row i/bs.  Of the d_nnz blocks in
           the diagonal portion, the block on the diagonal follows the dfill pattern
           and the remaining d_nnz-1 follow the ofill pattern; a NULL pattern means
           a dense block (bs nonzeros per component row). */
        sd_nnz[i] = (slice->d_nnz[i / bs] - 1) * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs) + (slice->dfill ? slice->dfill->i[i % bs + 1] - slice->dfill->i[i % bs] : bs);
        if (so_nnz) so_nnz[i] = slice->o_nnz[i / bs] * (slice->ofill ? slice->ofill->i[i % bs + 1] - slice->ofill->i[i % bs] : bs);
      }
      PetscCall(MatSeqAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J, slice->d_nz * bs, sd_nnz, slice->o_nz * bs, so_nnz));
      PetscCall(PetscFree2(sd_nnz, so_nnz));
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscCall(PetscMalloc1(slice->n + slice->Nghosts, &globals));
  PetscCall(MatGetOwnershipRange(*J, &rstart, NULL));
  /* Owned blocks: rstart is a scalar index, so rstart/bs is the first owned block */
  for (i = 0; i < slice->n; i++) globals[i] = rstart / bs + i;

  /* Ghost blocks follow the owned blocks in local numbering */
  for (i = 0; i < slice->Nghosts; i++) globals[slice->n + i] = slice->ghosts[i];
  /* PETSC_OWN_POINTER: the mapping takes over the globals array, so no free here */
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, bs, slice->n + slice->Nghosts, globals, PETSC_OWN_POINTER, &lmap));
  PetscCall(MatSetLocalToGlobalMapping(*J, lmap, lmap));
  PetscCall(ISLocalToGlobalMappingDestroy(&lmap));
  PetscCall(MatSetDM(*J, dm));
  PetscFunctionReturn(0);
}
66 
67 /*@C
68     DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
69       be ghosts on this process
70 
71     Not Collective
72 
73     Input Parameters:
74 +    slice - the DM object
75 .    bs - block size
76 .    nlocal - number of local (owned, non-ghost) blocks
77 .    Nghosts - number of ghost blocks on this process
78 -    ghosts - global indices of each ghost block
79 
80     Level: advanced
81 
82 .seealso `DMDestroy()`, `DMCreateGlobalVector()`
83 
84 @*/
85 PetscErrorCode DMSlicedSetGhosts(DM dm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[]) {
86   DM_Sliced *slice = (DM_Sliced *)dm->data;
87 
88   PetscFunctionBegin;
89   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
90   PetscCall(PetscFree(slice->ghosts));
91   PetscCall(PetscMalloc1(Nghosts, &slice->ghosts));
92   PetscCall(PetscArraycpy(slice->ghosts, ghosts, Nghosts));
93   slice->bs      = bs;
94   slice->n       = nlocal;
95   slice->Nghosts = Nghosts;
96   PetscFunctionReturn(0);
97 }
98 
99 /*@C
100     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
101 
102     Not Collective
103 
104     Input Parameters:
105 +    slice - the DM object
106 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
107            submatrix  (same for all local rows)
108 .    d_nnz - array containing the number of block nonzeros in the various block rows
109            in the diagonal portion of the local submatrix (possibly different for each block
110            row) or NULL.
111 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
112            submatrix (same for all local rows).
113 -    o_nnz - array containing the number of nonzeros in the various block rows of the
114            off-diagonal portion of the local submatrix (possibly different for
115            each block row) or NULL.
116 
117     Notes:
118     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
119     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
120 
121     Level: advanced
122 
123 .seealso `DMDestroy()`, `DMCreateGlobalVector()`, `MatMPIAIJSetPreallocation()`,
124          `MatMPIBAIJSetPreallocation()`, `DMSlicedGetMatrix()`, `DMSlicedSetBlockFills()`
125 
126 @*/
127 PetscErrorCode DMSlicedSetPreallocation(DM dm, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[]) {
128   DM_Sliced *slice = (DM_Sliced *)dm->data;
129 
130   PetscFunctionBegin;
131   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
132   slice->d_nz  = d_nz;
133   slice->d_nnz = (PetscInt *)d_nnz;
134   slice->o_nz  = o_nz;
135   slice->o_nnz = (PetscInt *)o_nnz;
136   PetscFunctionReturn(0);
137 }
138 
139 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs, const PetscInt *fill, DMSlicedBlockFills **inf) {
140   PetscInt            i, j, nz, *fi, *fj;
141   DMSlicedBlockFills *f;
142 
143   PetscFunctionBegin;
144   PetscValidPointer(inf, 3);
145   if (*inf) PetscCall(PetscFree3(*inf, (*inf)->i, (*inf)->j));
146   if (!fill) PetscFunctionReturn(0);
147   for (i = 0, nz = 0; i < bs * bs; i++)
148     if (fill[i]) nz++;
149   PetscCall(PetscMalloc3(1, &f, bs + 1, &fi, nz, &fj));
150   f->bs = bs;
151   f->nz = nz;
152   f->i  = fi;
153   f->j  = fj;
154   for (i = 0, nz = 0; i < bs; i++) {
155     fi[i] = nz;
156     for (j = 0; j < bs; j++)
157       if (fill[i * bs + j]) fj[nz++] = j;
158   }
159   fi[i] = nz;
160   *inf  = f;
161   PetscFunctionReturn(0);
162 }
163 
164 /*@C
165     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
166     of the matrix returned by DMSlicedGetMatrix().
167 
168     Logically Collective on dm
169 
170     Input Parameters:
171 +   sliced - the DM object
172 .   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
173 -   ofill - the fill pattern in the off-diagonal blocks
174 
175     Notes:
176     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
177     See DMDASetBlockFills() for example usage.
178 
179     Level: advanced
180 
181 .seealso `DMSlicedGetMatrix()`, `DMDASetBlockFills()`
182 @*/
183 PetscErrorCode DMSlicedSetBlockFills(DM dm, const PetscInt *dfill, const PetscInt *ofill) {
184   DM_Sliced *slice = (DM_Sliced *)dm->data;
185 
186   PetscFunctionBegin;
187   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
188   PetscCall(DMSlicedSetBlockFills_Private(slice->bs, dfill, &slice->dfill));
189   PetscCall(DMSlicedSetBlockFills_Private(slice->bs, ofill, &slice->ofill));
190   PetscFunctionReturn(0);
191 }
192 
193 static PetscErrorCode DMDestroy_Sliced(DM dm) {
194   DM_Sliced *slice = (DM_Sliced *)dm->data;
195 
196   PetscFunctionBegin;
197   PetscCall(PetscFree(slice->ghosts));
198   if (slice->dfill) PetscCall(PetscFree3(slice->dfill, slice->dfill->i, slice->dfill->j));
199   if (slice->ofill) PetscCall(PetscFree3(slice->ofill, slice->ofill->i, slice->ofill->j));
200   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
201   PetscCall(PetscFree(slice));
202   PetscFunctionReturn(0);
203 }
204 
/* Create the global vector for a DMSLICED: a block-ghosted vector whose local
   form appends the Nghosts ghost blocks after the n owned blocks. */
static PetscErrorCode DMCreateGlobalVector_Sliced(DM dm, Vec *gvec) {
  DM_Sliced *slice = (DM_Sliced *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(gvec, 2);
  *gvec = NULL;
  /* Local size is n*bs scalar entries; the global size is determined across the communicator */
  PetscCall(VecCreateGhostBlock(PetscObjectComm((PetscObject)dm), slice->bs, slice->n * slice->bs, PETSC_DETERMINE, slice->Nghosts, slice->ghosts, gvec));
  PetscCall(VecSetDM(*gvec, dm));
  PetscFunctionReturn(0);
}
216 
/* Begin updating the ghost entries of l, which must be the local (ghosted) form
   of the global vector g obtained with VecGhostGetLocalForm(). */
static PetscErrorCode DMGlobalToLocalBegin_Sliced(DM da, Vec g, InsertMode mode, Vec l) {
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g, l, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
  /* NOTE(review): VecGhostUpdateEnd() is invoked before VecGhostUpdateBegin() here,
     presumably to complete any outstanding update before starting a new one —
     confirm against the VecGhostUpdate* semantics before changing the order. */
  PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
  PetscCall(VecGhostUpdateBegin(g, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
227 
/* Finish the ghost update started by DMGlobalToLocalBegin_Sliced(); l must be
   the local (ghosted) form of the global vector g. */
static PetscErrorCode DMGlobalToLocalEnd_Sliced(DM da, Vec g, InsertMode mode, Vec l) {
  PetscBool flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g, l, &flg));
  PetscCheck(flg, PetscObjectComm((PetscObject)da), PETSC_ERR_ARG_WRONG, "Local vector is not local form of global vector");
  PetscCall(VecGhostUpdateEnd(g, mode, SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
237 
238 /*MC
239    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields
240 
241    See DMCreateSliced() for details.
242 
243   Level: intermediate
244 
245 .seealso: `DMType`, `DMCOMPOSITE`, `DMCreateSliced()`, `DMCreate()`
246 M*/
247 
248 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p) {
249   DM_Sliced *slice;
250 
251   PetscFunctionBegin;
252   PetscCall(PetscNewLog(p, &slice));
253   p->data = slice;
254 
255   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
256   p->ops->creatematrix       = DMCreateMatrix_Sliced;
257   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
258   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
259   p->ops->destroy            = DMDestroy_Sliced;
260   PetscFunctionReturn(0);
261 }
262 
263 /*@C
264     DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem
265 
266     Collective
267 
268     Input Parameters:
269 +   comm - the processors that will share the global vector
270 .   bs - the block size
271 .   nlocal - number of vector entries on this process
272 .   Nghosts - number of ghost points needed on this process
273 .   ghosts - global indices of all ghost points for this process
274 .   d_nnz - matrix preallocation information representing coupling within this process
275 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
276 
277     Output Parameter:
278 .   slice - the slice object
279 
280     Notes:
281         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
282         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
283         the ghost points.
284 
285         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
286 
287     Level: advanced
288 
289 .seealso `DMDestroy()`, `DMCreateGlobalVector()`, `DMSetType()`, `DMSLICED`, `DMSlicedSetGhosts()`, `DMSlicedSetPreallocation()`, `VecGhostUpdateBegin()`, `VecGhostUpdateEnd()`,
290          `VecGhostGetLocalForm()`, `VecGhostRestoreLocalForm()`
291 
292 @*/
/* Convenience constructor: create a DM, set its type to DMSLICED, record the
   ghost layout, and (optionally) the per-block-row preallocation counts. */
PetscErrorCode DMSlicedCreate(MPI_Comm comm, PetscInt bs, PetscInt nlocal, PetscInt Nghosts, const PetscInt ghosts[], const PetscInt d_nnz[], const PetscInt o_nnz[], DM *dm) {
  PetscFunctionBegin;
  PetscValidPointer(dm, 8);
  PetscCall(DMCreate(comm, dm));
  PetscCall(DMSetType(*dm, DMSLICED));
  PetscCall(DMSlicedSetGhosts(*dm, bs, nlocal, Nghosts, ghosts));
  /* Preallocation is optional; the uniform counts are passed as 0, so only the
     per-row arrays are recorded when d_nnz is supplied */
  if (d_nnz) PetscCall(DMSlicedSetPreallocation(*dm, 0, d_nnz, 0, o_nnz));
  PetscFunctionReturn(0);
}
302