xref: /petsc/src/dm/impls/sliced/sliced.c (revision 503c0ea9b45bcfbcebbb1ea5341243bbc69f0bea)
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;   /* block size; number of stored entries; CSR row offsets i[0..bs] and column indices j[0..nz-1] */
} DMSlicedBlockFills;

/* Private data for the DMSLICED implementation */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; local block count; N presumably global block count (not set in this file — verify); ghost block count; global indices of ghost blocks */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* per-block-row preallocation, diagonal and off-diagonal parts (MatMPIBAIJSetPreallocation() conventions); arrays may be NULL */
  DMSlicedBlockFills *dfill,*ofill;           /* optional within-block fill patterns from DMSlicedSetBlockFills(); NULL means dense block */
} DM_Sliced;
15 
/* Create the matrix for a DMSLICED.  Block (BAIJ) preallocation is passed through as
   given; scalar (AIJ) preallocation is derived from the per-block-row counts, honoring
   the fill patterns set with DMSlicedSetBlockFills() when present. */
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs   = slice->bs;
  PetscCall(MatCreate(PetscObjectComm((PetscObject)dm),J));
  PetscCall(MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE));
  PetscCall(MatSetBlockSize(*J,bs));
  PetscCall(MatSetType(*J,dm->mattype));
  /* Both BAIJ preallocation calls are made; only the one matching the actual matrix type takes effect */
  PetscCall(MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz));
  PetscCall(MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz));
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  PetscCall(PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij));
  if (!aij) {
    PetscCall(PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij));
  }
  if (aij) {
    if (bs == 1) {
      /* Block rows are scalar rows; use the counts unchanged */
      PetscCall(MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz));
    } else if (!slice->d_nnz) {
      /* Uniform counts only: every scalar row sees d_nz (resp. o_nz) dense bs-wide blocks */
      PetscCall(MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL));
      PetscCall(MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL));
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz is only needed when o_nnz was provided; a zero-length PetscMalloc2 is assumed
         to yield NULL so the "if (so_nnz)" below skips it — TODO confirm against PetscMalloc docs */
      PetscCall(PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz));
      for (i=0; i<slice->n*bs; i++) {
        /* d_nnz[block-row] includes the diagonal block itself: that one block contributes a
           dfill row (or a dense bs entries), the remaining d_nnz-1 blocks contribute ofill rows */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          /* All off-process blocks use the off-diagonal fill pattern */
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      PetscCall(MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz));
      PetscCall(MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz));
      PetscCall(PetscFree2(sd_nnz,so_nnz));
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  PetscCall(PetscMalloc1(slice->n+slice->Nghosts,&globals));
  PetscCall(MatGetOwnershipRange(*J,&rstart,NULL));
  /* Owned blocks get consecutive global block indices; rstart is in scalar rows, hence /bs */
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  /* The mapping takes ownership of globals (PETSC_OWN_POINTER); do not free it here */
  PetscCall(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap));
  PetscCall(MatSetLocalToGlobalMapping(*J,lmap,lmap));
  PetscCall(ISLocalToGlobalMappingDestroy(&lmap));
  PetscCall(MatSetDM(*J,dm));
  PetscFunctionReturn(0);
}
74 
/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector()

@*/
93 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
94 {
95   DM_Sliced      *slice = (DM_Sliced*)dm->data;
96 
97   PetscFunctionBegin;
98   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
99   PetscCall(PetscFree(slice->ghosts));
100   PetscCall(PetscMalloc1(Nghosts,&slice->ghosts));
101   PetscCall(PetscArraycpy(slice->ghosts,ghosts,Nghosts));
102   slice->bs      = bs;
103   slice->n       = nlocal;
104   slice->Nghosts = Nghosts;
105   PetscFunctionReturn(0);
106 }
107 
/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
           submatrix (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           in the diagonal portion of the local submatrix (possibly different for each block
           row) or NULL.
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
           submatrix (same for all local rows).
-    o_nnz - array containing the number of nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or NULL.

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
136 PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
137 {
138   DM_Sliced *slice = (DM_Sliced*)dm->data;
139 
140   PetscFunctionBegin;
141   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
142   slice->d_nz  = d_nz;
143   slice->d_nnz = (PetscInt*)d_nnz;
144   slice->o_nz  = o_nz;
145   slice->o_nnz = (PetscInt*)o_nnz;
146   PetscFunctionReturn(0);
147 }
148 
/* Compress a dense, row-major bs*bs 0/1 fill pattern into CSR form, replacing any
   previously stored pattern.  A NULL fill simply frees the old pattern.  The struct and
   its two arrays come from one PetscMalloc3(), matching the PetscFree3() calls here and
   in DMDestroy_Sliced(). */
static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscValidPointer(inf,3);
  /* Free the existing pattern; PetscFree3() also zeroes its arguments, leaving *inf NULL — TODO confirm */
  if (*inf) PetscCall(PetscFree3(*inf,(*inf)->i,(*inf)->j));
  if (!fill) PetscFunctionReturn(0);
  /* First pass: count nonzeros so the column array can be sized exactly */
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  PetscCall(PetscMalloc3(1,&f,bs+1,&fi,nz,&fj));
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  /* Second pass: build CSR — fi[row] is the offset of row's first column index within fj */
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz; /* fi[bs] caps the last row; equals total nonzero count */
  *inf  = f;
  PetscFunctionReturn(0);
}
172 
/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMSlicedGetMatrix().

    Logically Collective on dm

    Input Parameters:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
-   ofill - the fill pattern in the off-diagonal blocks

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.

    Level: advanced

.seealso DMSlicedGetMatrix(), DMDASetBlockFills()
@*/
192 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
193 {
194   DM_Sliced      *slice = (DM_Sliced*)dm->data;
195 
196   PetscFunctionBegin;
197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
198   PetscCall(DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill));
199   PetscCall(DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill));
200   PetscFunctionReturn(0);
201 }
202 
203 static PetscErrorCode  DMDestroy_Sliced(DM dm)
204 {
205   DM_Sliced      *slice = (DM_Sliced*)dm->data;
206 
207   PetscFunctionBegin;
208   PetscCall(PetscFree(slice->ghosts));
209   if (slice->dfill) PetscCall(PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j));
210   if (slice->ofill) PetscCall(PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j));
211   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
212   PetscCall(PetscFree(slice));
213   PetscFunctionReturn(0);
214 }
215 
216 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
217 {
218   DM_Sliced      *slice = (DM_Sliced*)dm->data;
219 
220   PetscFunctionBegin;
221   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
222   PetscValidPointer(gvec,2);
223   *gvec = NULL;
224   PetscCall(VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec));
225   PetscCall(VecSetDM(*gvec,dm));
226   PetscFunctionReturn(0);
227 }
228 
/* Begin updating the ghost values of l, which must be the VecGhostGetLocalForm() local
   form of the global vector g. */
static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g,l,&flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  /* NOTE(review): VecGhostUpdateEnd() is issued before VecGhostUpdateBegin() here —
     presumably to drain any still-pending update before starting a new one, with the
     matching End issued in DMGlobalToLocalEnd_Sliced(); confirm against the
     VecGhostUpdateBegin()/VecGhostUpdateEnd() pairing rules. */
  PetscCall(VecGhostUpdateEnd(g,mode,SCATTER_FORWARD));
  PetscCall(VecGhostUpdateBegin(g,mode,SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
240 
/* Finish the ghost update started by DMGlobalToLocalBegin_Sliced(); l must be the
   local form of g. */
static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscBool      flg;

  PetscFunctionBegin;
  PetscCall(VecGhostIsLocalForm(g,l,&flg));
  PetscCheck(flg,PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  /* Completes the VecGhostUpdateBegin() issued in DMGlobalToLocalBegin_Sliced() */
  PetscCall(VecGhostUpdateEnd(g,mode,SCATTER_FORWARD));
  PetscFunctionReturn(0);
}
251 
/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields

   See DMSlicedCreate() for details.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/
261 
262 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
263 {
264   DM_Sliced      *slice;
265 
266   PetscFunctionBegin;
267   PetscCall(PetscNewLog(p,&slice));
268   p->data = slice;
269 
270   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
271   p->ops->creatematrix       = DMCreateMatrix_Sliced;
272   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
273   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
274   p->ops->destroy            = DMDestroy_Sliced;
275   PetscFunctionReturn(0);
276 }
277 
/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   bs - the block size
.   nlocal - number of local (owned, non-ghost) blocks on this process
.   Nghosts - number of ghost blocks needed on this process
.   ghosts - global indices of all ghost blocks for this process
.   d_nnz - matrix preallocation information representing coupling within this process
-   o_nnz - matrix preallocation information representing coupling between this process and other processes

    Output Parameter:
.   dm - the DM object

    Notes:
        This DM does not support DMCreateLocalVector(); instead one directly uses
        VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
        the ghost points.

        One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
         VecGhostGetLocalForm(), VecGhostRestoreLocalForm()

@*/
308 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
309 {
310   PetscFunctionBegin;
311   PetscValidPointer(dm,8);
312   PetscCall(DMCreate(comm,dm));
313   PetscCall(DMSetType(*dm,DMSLICED));
314   PetscCall(DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts));
315   if (d_nnz) {
316     PetscCall(DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz));
317   }
318   PetscFunctionReturn(0);
319 }
320