xref: /petsc/src/dm/impls/sliced/sliced.c (revision 534a8f05a7a8aff70dd8cfd53d9cd834400a8dbf)
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix:
   bs = block size, nz = number of nonzero entries,
   i  = row offsets (length bs+1), j = column indices (length nz),
   so the nonzero columns of row r are j[i[r]] .. j[i[r+1]-1]. */
typedef struct {
  PetscInt bs,nz,*i,*j;
} DMSlicedBlockFills;
9 
/* Implementation-private data for the DMSLICED type */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; local/global owned block counts; number of ghost blocks and their global block indices */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* block-row preallocation for the diagonal/off-diagonal parts, as in MatMPIBAIJSetPreallocation() */
  DMSlicedBlockFills *dfill,*ofill;           /* intra-block fill patterns for scalar (AIJ) matrices; NULL means treat the block as dense */
} DM_Sliced;
15 
/* Create a matrix with the sparsity described by this DMSliced: block preallocation is
   applied directly for BAIJ formats and translated to per-scalar-row counts (respecting
   any DMSlicedSetBlockFills() patterns) when the matrix type turns out to be AIJ. */
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs   = slice->bs;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  /* These preallocation calls are no-ops unless *J is actually of the matching BAIJ type */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar problem: block counts already are per-scalar-row counts */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Uniform counts only: each of the d_nz (o_nz) block couplings contributes bs scalar columns */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz is only allocated when o_nnz was supplied; the !! trick makes a zero-length request otherwise */
      ierr = PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* Of the d_nnz[i/bs] diagonal-part block couplings, one is the diagonal block itself
           (dfill pattern); the other d_nnz-1 use the off-diagonal pattern (ofill), each
           falling back to a dense bs-column block when no fill pattern was set */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc1(slice->n+slice->Nghosts,&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,NULL);CHKERRQ(ierr);
  /* rstart is a scalar row index, so rstart/bs is this process's first global block */
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  /* Ghost blocks follow the owned blocks in the local numbering */
  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of globals, so it is not freed here */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
74 
75 /*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process
78 
79     Not Collective
80 
81     Input Parameters:
+    dm - the DM object
83 .    bs - block size
84 .    nlocal - number of local (owned, non-ghost) blocks
85 .    Nghosts - number of ghost blocks on this process
86 -    ghosts - global indices of each ghost block
87 
88     Level: advanced
89 
90 .seealso DMDestroy(), DMCreateGlobalVector()
91 
92 @*/
93 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
94 {
95   PetscErrorCode ierr;
96   DM_Sliced      *slice = (DM_Sliced*)dm->data;
97 
98   PetscFunctionBegin;
99   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
100   ierr           = PetscFree(slice->ghosts);CHKERRQ(ierr);
101   ierr           = PetscMalloc1(Nghosts,&slice->ghosts);CHKERRQ(ierr);
102   ierr           = PetscArraycpy(slice->ghosts,ghosts,Nghosts);CHKERRQ(ierr);
103   slice->bs      = bs;
104   slice->n       = nlocal;
105   slice->Nghosts = Nghosts;
106   PetscFunctionReturn(0);
107 }
108 
109 /*@C
110     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
111 
112     Not Collective
113 
114     Input Parameters:
+    dm - the DM object
116 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
117            submatrix  (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           in the diagonal portion of the local submatrix (possibly different for each block
           row) or NULL.
121 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
122            submatrix (same for all local rows).
123 -    o_nnz - array containing the number of nonzeros in the various block rows of the
124            off-diagonal portion of the local submatrix (possibly different for
125            each block row) or NULL.
126 
127     Notes:
128     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
129     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
130 
131     Level: advanced
132 
133 .seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
134          MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()
135 
136 @*/
137 PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
138 {
139   DM_Sliced *slice = (DM_Sliced*)dm->data;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
143   slice->d_nz  = d_nz;
144   slice->d_nnz = (PetscInt*)d_nnz;
145   slice->o_nz  = o_nz;
146   slice->o_nnz = (PetscInt*)o_nnz;
147   PetscFunctionReturn(0);
148 }
149 
/* Convert a dense bs*bs fill array (nonzero entry => coupling present) into the CSR
   DMSlicedBlockFills form.  Any pattern previously stored in *inf is freed first;
   fill == NULL simply leaves *inf cleared, meaning "treat the block as dense". */
static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscValidPointer(inf,3);
  if (*inf) {ierr = PetscFree3(*inf,(*inf)->i,(*inf)->j);CHKERRQ(ierr);}
  if (!fill) PetscFunctionReturn(0);
  /* Count the nonzeros so the CSR arrays can be sized exactly */
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  /* Header and both CSR arrays come from one PetscMalloc3(); they must be released
     together with PetscFree3() in the same argument order */
  ierr  = PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);CHKERRQ(ierr);
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
  /* Build CSR: fi[i] is the offset of row i; fj collects the nonzero column indices */
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz; /* closing offset: total nonzero count */
  *inf  = f;
  PetscFunctionReturn(0);
}
174 
175 /*@C
176     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
177     of the matrix returned by DMSlicedGetMatrix().
178 
179     Logically Collective on dm
180 
    Input Parameters:
+   dm - the DM object
183 .   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
184 -   ofill - the fill pattern in the off-diagonal blocks
185 
186     Notes:
187     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
188     See DMDASetBlockFills() for example usage.
189 
190     Level: advanced
191 
192 .seealso DMSlicedGetMatrix(), DMDASetBlockFills()
193 @*/
194 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
195 {
196   DM_Sliced      *slice = (DM_Sliced*)dm->data;
197   PetscErrorCode ierr;
198 
199   PetscFunctionBegin;
200   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
201   ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
202   ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
203   PetscFunctionReturn(0);
204 }
205 
/* Release all DMSLICED private data: the ghost list, both fill patterns, and the struct itself */
static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* Each fill pattern came from a single PetscMalloc3(), so all three pieces are freed together */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(slice);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
219 
220 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
221 {
222   PetscErrorCode ierr;
223   DM_Sliced      *slice = (DM_Sliced*)dm->data;
224 
225   PetscFunctionBegin;
226   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
227   PetscValidPointer(gvec,2);
228   *gvec = 0;
229   ierr  = VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
230   ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
231   PetscFunctionReturn(0);
232 }
233 
/* Begin updating the ghost values of l (the local form of g) from their owning processes */
static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  /* DMSLICED has no independent local vectors; l must be the ghosted local form of g */
  ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  /* NOTE(review): VecGhostUpdateEnd() is invoked before VecGhostUpdateBegin() here,
     presumably to complete any previously posted ghost update before starting the
     forward scatter -- confirm against the VecGhost update semantics before reordering */
  ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
246 
247 static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
248 {
249   PetscErrorCode ierr;
250   PetscBool      flg;
251 
252   PetscFunctionBegin;
253   ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
254   if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
255   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
256   PetscFunctionReturn(0);
257 }
258 
259 /*MC
260    DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields
261 
   See DMSlicedCreate() for details.
263 
264   Level: intermediate
265 
.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
267 M*/
268 
269 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
270 {
271   PetscErrorCode ierr;
272   DM_Sliced      *slice;
273 
274   PetscFunctionBegin;
275   ierr    = PetscNewLog(p,&slice);CHKERRQ(ierr);
276   p->data = slice;
277 
278   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
279   p->ops->creatematrix       = DMCreateMatrix_Sliced;
280   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
281   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
282   p->ops->destroy            = DMDestroy_Sliced;
283   PetscFunctionReturn(0);
284 }
285 
286 /*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem
288 
289     Collective
290 
    Input Parameters:
292 +   comm - the processors that will share the global vector
293 .   bs - the block size
294 .   nlocal - number of vector entries on this process
295 .   Nghosts - number of ghost points needed on this process
296 .   ghosts - global indices of all ghost points for this process
297 .   d_nnz - matrix preallocation information representing coupling within this process
298 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
299 
    Output Parameter:
.   dm - the newly created DM object
302 
303     Notes:
304         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
305         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
306         the ghost points.
307 
308         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
309 
310     Level: advanced
311 
312 .seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
313          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
314 
315 @*/
316 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
317 {
318   PetscErrorCode ierr;
319 
320   PetscFunctionBegin;
321   PetscValidPointer(dm,2);
322   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
323   ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
324   ierr = DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);CHKERRQ(ierr);
325   if (d_nnz) {
326     ierr = DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);CHKERRQ(ierr);
327   }
328   PetscFunctionReturn(0);
329 }
330 
331