/* xref: /petsc/src/dm/impls/sliced/sliced.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03) */
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc-private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;   /* block size; nonzero count; CSR row pointers (length bs+1) and column indices (length nz) */
} DMSlicedBlockFills;
9 
/* Implementation context for a DMSLICED, stored in dm->data */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; local block count; N presumably the global block count (not set in this file -- confirm); ghost block count; global indices of ghost blocks (owned copy) */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* per-block-row preallocation; the arrays are caller-owned pointers, see DMSlicedSetPreallocation() */
  DMSlicedBlockFills *dfill,*ofill;           /* optional intra-block fill patterns used when preallocating scalar (AIJ) matrices; NULL means dense block */
} DM_Sliced;
15 
#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Sliced"
/* Create the Jacobian matrix for a DMSliced: set sizes/type, preallocate (BAIJ
   directly from the stored block counts; AIJ by converting block-row counts to
   scalar-row counts, respecting DMSlicedSetBlockFills()), and attach scalar and
   block local-to-global mappings. */
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs   = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Presumably these are no-ops when *J is not a (Seq|MPI)BAIJ matrix (PETSc try-method convention) -- confirm */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar problem: block rows are scalar rows, pass counts straight through */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Only constant per-block-row counts were given: scale by bs for a per-scalar-row bound */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* The (!!slice->o_nnz) factor makes so_nnz a zero-length allocation (NULL) when o_nnz was not supplied */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* Of the d_nnz[i/bs] blocks in the diagonal portion, d_nnz[i/bs]-1 are off-diagonal
           blocks (use the ofill row width) and one is the diagonal block (use the dfill row
           width); a NULL fill pattern means a dense block contributing bs entries per row */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<slice->n*bs; i++) globals[i] = rstart + i;

  for (i=0; i<slice->Nghosts*bs; i++) {
    /* Ghost block g contributes scalar indices g*bs .. g*bs+bs-1 */
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of globals, do not free it here */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "DMSlicedSetGhosts"
82 /*@C
83     DMSlicedSetGhosts - Sets the global indices of other processes elements that will
84       be ghosts on this process
85 
86     Not Collective
87 
88     Input Parameters:
89 +    slice - the DM object
90 .    bs - block size
91 .    nlocal - number of local (owned, non-ghost) blocks
92 .    Nghosts - number of ghost blocks on this process
93 -    ghosts - global indices of each ghost block
94 
95     Level: advanced
96 
97 .seealso DMDestroy(), DMCreateGlobalVector()
98 
99 @*/
100 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
101 {
102   PetscErrorCode ierr;
103   DM_Sliced      *slice = (DM_Sliced*)dm->data;
104 
105   PetscFunctionBegin;
106   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
107   ierr           = PetscFree(slice->ghosts);CHKERRQ(ierr);
108   ierr           = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
109   ierr           = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
110   slice->bs      = bs;
111   slice->n       = nlocal;
112   slice->Nghosts = Nghosts;
113   PetscFunctionReturn(0);
114 }
115 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetPreallocation"
/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    slice - the DM object
.    d_nz  - number of block nonzeros per block row in diagonal portion of local
           submatrix  (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the in diagonal portion of the local (possibly different for each block
           row) or PETSC_NULL.
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
           submatrix (same for all local rows).
-    o_nnz - array containing the number of nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or PETSC_NULL.

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    The d_nnz and o_nnz arrays are not copied; only the pointers are stored.  Presumably
    they must remain valid at least until the matrix is created -- verify with callers.

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* const is cast away because the context stores plain pointers; the arrays are
     read later by DMCreateMatrix_Sliced() and are never modified there */
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  PetscFunctionReturn(0);
}
158 
159 #undef __FUNCT__
160 #define __FUNCT__ "DMSlicedSetBlockFills_Private"
161 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
162 {
163   PetscErrorCode     ierr;
164   PetscInt           i,j,nz,*fi,*fj;
165   DMSlicedBlockFills *f;
166 
167   PetscFunctionBegin;
168   PetscValidPointer(inf,3);
169   if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
170   if (!fill) PetscFunctionReturn(0);
171   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
172   ierr  = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
173   f->bs = bs;
174   f->nz = nz;
175   f->i  = fi;
176   f->j  = fj;
177   for (i=0,nz=0; i<bs; i++) {
178     fi[i] = nz;
179     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
180   }
181   fi[i] = nz;
182   *inf  = f;
183   PetscFunctionReturn(0);
184 }
185 
186 #undef __FUNCT__
187 #define __FUNCT__ "DMSlicedSetBlockFills"
188 /*@C
189     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
190     of the matrix returned by DMSlicedGetMatrix().
191 
192     Logically Collective on DM
193 
194     Input Parameter:
195 +   sliced - the DM object
196 .   dfill - the fill pattern in the diagonal block (may be PETSC_NULL, means use dense block)
197 -   ofill - the fill pattern in the off-diagonal blocks
198 
199     Notes:
200     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
201     See DMDASetBlockFills() for example usage.
202 
203     Level: advanced
204 
205 .seealso DMSlicedGetMatrix(), DMDASetBlockFills()
206 @*/
207 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
208 {
209   DM_Sliced      *slice = (DM_Sliced*)dm->data;
210   PetscErrorCode ierr;
211 
212   PetscFunctionBegin;
213   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
214   ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
215   ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
216   PetscFunctionReturn(0);
217 }
218 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
/* Release all storage owned by the DM_Sliced context. */
static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* dfill/ofill and their i/j arrays come from one PetscMalloc3() with the struct
     as the first (base) allocation, so the struct pointer is passed first here.
     NOTE(review): DMSlicedSetBlockFills_Private() frees the same triple in the
     opposite order -- the two call sites should agree; confirm against the
     PetscFree3() definition for this PETSc version. */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(slice);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
234 
235 #undef __FUNCT__
236 #define __FUNCT__ "DMCreateGlobalVector_Sliced"
237 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
238 {
239   PetscErrorCode ierr;
240   DM_Sliced      *slice = (DM_Sliced*)dm->data;
241 
242   PetscFunctionBegin;
243   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
244   PetscValidPointer(gvec,2);
245   *gvec = 0;
246   ierr  = VecCreateGhostBlock(((PetscObject)dm)->comm,slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
247   ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
248   PetscFunctionReturn(0);
249 }
250 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Sliced"
/* Begin updating ghost values of g; l must be the VecGhost local form of g.
   NOTE(review): VecGhostUpdateEnd() is called *before* VecGhostUpdateBegin(),
   apparently to complete any update still in flight before starting the new one
   (whose End is issued by DMGlobalToLocalEnd_Sliced()) -- confirm this ordering
   is intentional. */
static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
265 
266 #undef __FUNCT__
267 #define __FUNCT__ "DMGlobalToLocalEnd_Sliced"
268 static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
269 {
270   PetscErrorCode ierr;
271   PetscBool      flg;
272 
273   PetscFunctionBegin;
274   ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
275   if (!flg) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
276   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
277   PetscFunctionReturn(0);
278 }
279 
/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields

   See DMSlicedCreate() for details.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/
289 
290 EXTERN_C_BEGIN
291 #undef __FUNCT__
292 #define __FUNCT__ "DMCreate_Sliced"
293 PetscErrorCode  DMCreate_Sliced(DM p)
294 {
295   PetscErrorCode ierr;
296   DM_Sliced      *slice;
297 
298   PetscFunctionBegin;
299   ierr    = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);
300   p->data = slice;
301 
302   ierr = PetscObjectChangeTypeName((PetscObject)p,DMSLICED);CHKERRQ(ierr);
303 
304   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
305   p->ops->creatematrix       = DMCreateMatrix_Sliced;
306   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
307   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
308   p->ops->destroy            = DMDestroy_Sliced;
309   PetscFunctionReturn(0);
310 }
311 EXTERN_C_END
312 
313 #undef __FUNCT__
314 #define __FUNCT__ "DMSlicedCreate"
315 /*@C
316     DMSlicedCreate - Creates a DM object, used to manage data for a unstructured problem
317 
318     Collective on MPI_Comm
319 
320     Input Parameter:
321 +   comm - the processors that will share the global vector
322 .   bs - the block size
323 .   nlocal - number of vector entries on this process
324 .   Nghosts - number of ghost points needed on this process
325 .   ghosts - global indices of all ghost points for this process
326 .   d_nnz - matrix preallocation information representing coupling within this process
327 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
328 
329     Output Parameters:
330 .   slice - the slice object
331 
332     Notes:
333         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
334         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
335         the ghost points.
336 
337         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
338 
339     Level: advanced
340 
341 .seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
342          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
343 
344 @*/
345 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
346 {
347   PetscErrorCode ierr;
348 
349   PetscFunctionBegin;
350   PetscValidPointer(dm,2);
351   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
352   ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
353   ierr = DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);CHKERRQ(ierr);
354   if (d_nnz) {
355     ierr = DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);CHKERRQ(ierr);
356   }
357   PetscFunctionReturn(0);
358 }
359 
360