xref: /petsc/src/dm/impls/sliced/sliced.c (revision 030f984af8d8bb4c203755d35bded3c05b3d83ce)
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;  /* bs: block size; nz: total nonzeros; i: row offsets (length bs+1); j: column indices (length nz) */
} DMSlicedBlockFills;
9 
/* Private data for a DM of type DMSLICED */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; owned block count; N: global block count (not set in this file -- TODO confirm); ghost block count; global indices of ghost blocks */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* preallocation: constant and per-block-row nonzero-block counts for the diagonal/off-diagonal parts (arrays referenced, not copied) */
  DMSlicedBlockFills *dfill,*ofill;           /* optional CSR fill patterns for diagonal and off-diagonal blocks; NULL means dense blocks */
} DM_Sliced;
15 
/*
  DMCreateMatrix() implementation for DMSLICED.

  Creates a matrix sized by the local block count, preallocates it from the data
  stored by DMSlicedSetPreallocation() (expanding per-block-row counts to
  per-scalar-row counts for AIJ formats, honoring DMSlicedSetBlockFills() when
  set), and attaches a block local-to-global mapping so that
  MatSetValuesLocal()/MatSetValuesBlockedLocal() can be used.
*/
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs   = slice->bs;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  /* These preallocation calls are ignored unless *J is of the matching (Seq/MPI)BAIJ type. */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar blocks: the block-row counts already are scalar-row counts. */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Only constant per-block-row counts were given: scale by the block size. */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz gets a zero-length allocation when o_nnz was not provided, so it doubles as a "have off-diagonal counts" flag below. */
      ierr = PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* One block on this block-row is the diagonal block (dfill pattern); the other d_nnz-1 blocks use the ofill pattern; a NULL pattern means a dense row of bs entries. */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc1(slice->n+slice->Nghosts,&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,NULL);CHKERRQ(ierr);
  /* Owned blocks map to consecutive global block indices starting at rstart/bs. */
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of globals; do not free it here. */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  ierr = MatSetDM(*J,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
75 
76 /*@C
77     DMSlicedSetGhosts - Sets the global indices of other processes elements that will
78       be ghosts on this process
79 
80     Not Collective
81 
82     Input Parameters:
+    dm - the DM object
84 .    bs - block size
85 .    nlocal - number of local (owned, non-ghost) blocks
86 .    Nghosts - number of ghost blocks on this process
87 -    ghosts - global indices of each ghost block
88 
89     Level: advanced
90 
91 .seealso DMDestroy(), DMCreateGlobalVector()
92 
93 @*/
94 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
95 {
96   PetscErrorCode ierr;
97   DM_Sliced      *slice = (DM_Sliced*)dm->data;
98 
99   PetscFunctionBegin;
100   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
101   ierr           = PetscFree(slice->ghosts);CHKERRQ(ierr);
102   ierr           = PetscMalloc1(Nghosts,&slice->ghosts);CHKERRQ(ierr);
103   ierr           = PetscArraycpy(slice->ghosts,ghosts,Nghosts);CHKERRQ(ierr);
104   slice->bs      = bs;
105   slice->n       = nlocal;
106   slice->Nghosts = Nghosts;
107   PetscFunctionReturn(0);
108 }
109 
110 /*@C
111     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
112 
113     Not Collective
114 
115     Input Parameters:
+    dm - the DM object
117 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
118            submatrix  (same for all local rows)
119 .    d_nnz - array containing the number of block nonzeros in the various block rows
           in the diagonal portion of the local submatrix (possibly different for each
           block row) or NULL.
122 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
123            submatrix (same for all local rows).
124 -    o_nnz - array containing the number of nonzeros in the various block rows of the
125            off-diagonal portion of the local submatrix (possibly different for
126            each block row) or NULL.
127 
128     Notes:
129     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
130     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
131 
132     Level: advanced
133 
134 .seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
135          MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()
136 
137 @*/
138 PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
139 {
140   DM_Sliced *slice = (DM_Sliced*)dm->data;
141 
142   PetscFunctionBegin;
143   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
144   slice->d_nz  = d_nz;
145   slice->d_nnz = (PetscInt*)d_nnz;
146   slice->o_nz  = o_nz;
147   slice->o_nnz = (PetscInt*)o_nnz;
148   PetscFunctionReturn(0);
149 }
150 
151 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
152 {
153   PetscErrorCode     ierr;
154   PetscInt           i,j,nz,*fi,*fj;
155   DMSlicedBlockFills *f;
156 
157   PetscFunctionBegin;
158   PetscValidPointer(inf,3);
159   if (*inf) {ierr = PetscFree3(*inf,(*inf)->i,(*inf)->j);CHKERRQ(ierr);}
160   if (!fill) PetscFunctionReturn(0);
161   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
162   ierr  = PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);CHKERRQ(ierr);
163   f->bs = bs;
164   f->nz = nz;
165   f->i  = fi;
166   f->j  = fj;
167   for (i=0,nz=0; i<bs; i++) {
168     fi[i] = nz;
169     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
170   }
171   fi[i] = nz;
172   *inf  = f;
173   PetscFunctionReturn(0);
174 }
175 
176 /*@C
177     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
178     of the matrix returned by DMSlicedGetMatrix().
179 
180     Logically Collective on dm
181 
    Input Parameters:
+   dm - the DM object
184 .   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
185 -   ofill - the fill pattern in the off-diagonal blocks
186 
187     Notes:
188     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
189     See DMDASetBlockFills() for example usage.
190 
191     Level: advanced
192 
193 .seealso DMSlicedGetMatrix(), DMDASetBlockFills()
194 @*/
195 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
196 {
197   DM_Sliced      *slice = (DM_Sliced*)dm->data;
198   PetscErrorCode ierr;
199 
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
202   ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
203   ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
204   PetscFunctionReturn(0);
205 }
206 
207 static PetscErrorCode  DMDestroy_Sliced(DM dm)
208 {
209   PetscErrorCode ierr;
210   DM_Sliced      *slice = (DM_Sliced*)dm->data;
211 
212   PetscFunctionBegin;
213   ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
214   if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
215   if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
216   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
217   ierr = PetscFree(slice);CHKERRQ(ierr);
218   PetscFunctionReturn(0);
219 }
220 
221 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
222 {
223   PetscErrorCode ierr;
224   DM_Sliced      *slice = (DM_Sliced*)dm->data;
225 
226   PetscFunctionBegin;
227   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
228   PetscValidPointer(gvec,2);
229   *gvec = NULL;
230   ierr  = VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
231   ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
/* Begin scattering owned values of g into the ghost region of its local form l.
   NOTE(review): VecGhostUpdateEnd() is invoked before VecGhostUpdateBegin() here,
   presumably to complete any previously started ghost update before launching this
   forward one -- confirm against VecGhost usage conventions. */
static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  /* l must be the local (ghosted) form of g, as from VecGhostGetLocalForm(). */
  ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
247 
248 static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
249 {
250   PetscErrorCode ierr;
251   PetscBool      flg;
252 
253   PetscFunctionBegin;
254   ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
255   if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
256   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
257   PetscFunctionReturn(0);
258 }
259 
260 /*MC
261    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields
262 
263    See DMCreateSliced() for details.
264 
265   Level: intermediate
266 
267 .seealso: DMType, DMCOMPOSITE, DMCreateSliced(), DMCreate()
268 M*/
269 
270 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
271 {
272   PetscErrorCode ierr;
273   DM_Sliced      *slice;
274 
275   PetscFunctionBegin;
276   ierr    = PetscNewLog(p,&slice);CHKERRQ(ierr);
277   p->data = slice;
278 
279   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
280   p->ops->creatematrix       = DMCreateMatrix_Sliced;
281   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
282   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
283   p->ops->destroy            = DMDestroy_Sliced;
284   PetscFunctionReturn(0);
285 }
286 
287 /*@C
288     DMSlicedCreate - Creates a DM object, used to manage data for a unstructured problem
289 
290     Collective
291 
    Input Parameters:
293 +   comm - the processors that will share the global vector
294 .   bs - the block size
.   nlocal - number of local (owned, non-ghost) blocks on this process
296 .   Nghosts - number of ghost points needed on this process
297 .   ghosts - global indices of all ghost points for this process
298 .   d_nnz - matrix preallocation information representing coupling within this process
299 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
300 
    Output Parameter:
.   dm - the resulting slice object
303 
304     Notes:
305         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
306         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
307         the ghost points.
308 
309         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
310 
311     Level: advanced
312 
313 .seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
314          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
315 
316 @*/
317 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
318 {
319   PetscErrorCode ierr;
320 
321   PetscFunctionBegin;
322   PetscValidPointer(dm,8);
323   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
324   ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
325   ierr = DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);CHKERRQ(ierr);
326   if (d_nnz) {
327     ierr = DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);CHKERRQ(ierr);
328   }
329   PetscFunctionReturn(0);
330 }
331 
332