xref: /petsc/src/dm/impls/sliced/sliced.c (revision bebe2cf65d55febe21a5af8db2bd2e168caaa2e7) !
1 #include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
2 #include <petscmat.h>
3 #include <petsc/private/dmimpl.h>
4 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;   /* block size; total nonzeros; row offsets (length bs+1); column indices (length nz) */
} DMSlicedBlockFills;
9 
/* Implementation data for the "sliced" DM: block-ghosted vector layout plus matrix preallocation hints */
typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;   /* block size; local/global block counts; number of ghost blocks and their global block indices */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;  /* per-block-row preallocation for diagonal/off-diagonal parts (see DMSlicedSetPreallocation()) */
  DMSlicedBlockFills *dfill,*ofill;            /* optional intra-block fill patterns (see DMSlicedSetBlockFills()); NULL means dense blocks */
} DM_Sliced;
15 
#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Sliced"
/* Create a matrix with the sparsity pattern and local-to-global mapping of this sliced DM.
   Preallocates as BAIJ from the per-block-row counts; if the chosen type turns out to be a
   scalar (AIJ) format, the counts are converted to per-scalar-row form, respecting any
   patterns set with DMSlicedSetBlockFills(). */
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap;
  void                   (*aij)(void) = NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs   = slice->bs;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  /* These calls are ignored by matrix types that do not match, so both can be made unconditionally */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Block rows are scalar rows: the user's counts apply directly */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Only uniform counts given: each block contributes bs entries per scalar row */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,NULL,slice->o_nz*bs,NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* The (!!slice->o_nnz) factor makes the second allocation empty (so_nnz left NULL) when no off-diagonal counts were given */
      ierr = PetscMalloc2(slice->n*bs,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* d_nnz counts the diagonal block itself, hence the -1: that one block follows the dfill
           pattern while the remaining d_nnz-1 blocks in the diagonal portion follow ofill
           (a fill pattern of NULL means a dense block, bs entries wide) */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc1(slice->n+slice->Nghosts,&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,NULL);CHKERRQ(ierr);
  /* rstart is a scalar row index, so rstart/bs is the first locally owned block */
  for (i=0; i<slice->n; i++) globals[i] = rstart/bs + i;

  for (i=0; i<slice->Nghosts; i++) {
    globals[slice->n+i] = slice->ghosts[i];
  }
  /* The mapping takes ownership of globals (PETSC_OWN_POINTER); do not free it here */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,bs,slice->n+slice->Nghosts,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
76 
77 #undef __FUNCT__
78 #define __FUNCT__ "DMSlicedSetGhosts"
79 /*@C
80     DMSlicedSetGhosts - Sets the global indices of other processes elements that will
81       be ghosts on this process
82 
83     Not Collective
84 
85     Input Parameters:
86 +    slice - the DM object
87 .    bs - block size
88 .    nlocal - number of local (owned, non-ghost) blocks
89 .    Nghosts - number of ghost blocks on this process
90 -    ghosts - global indices of each ghost block
91 
92     Level: advanced
93 
94 .seealso DMDestroy(), DMCreateGlobalVector()
95 
96 @*/
97 PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
98 {
99   PetscErrorCode ierr;
100   DM_Sliced      *slice = (DM_Sliced*)dm->data;
101 
102   PetscFunctionBegin;
103   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
104   ierr           = PetscFree(slice->ghosts);CHKERRQ(ierr);
105   ierr           = PetscMalloc1(Nghosts,&slice->ghosts);CHKERRQ(ierr);
106   ierr           = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
107   slice->bs      = bs;
108   slice->n       = nlocal;
109   slice->Nghosts = Nghosts;
110   PetscFunctionReturn(0);
111 }
112 
113 #undef __FUNCT__
114 #define __FUNCT__ "DMSlicedSetPreallocation"
115 /*@C
116     DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced
117 
118     Not Collective
119 
120     Input Parameters:
121 +    slice - the DM object
122 .    d_nz  - number of block nonzeros per block row in diagonal portion of local
123            submatrix  (same for all local rows)
124 .    d_nnz - array containing the number of block nonzeros in the various block rows
125            of the in diagonal portion of the local (possibly different for each block
126            row) or NULL.
127 .    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
128            submatrix (same for all local rows).
129 -    o_nnz - array containing the number of nonzeros in the various block rows of the
130            off-diagonal portion of the local submatrix (possibly different for
131            each block row) or NULL.
132 
133     Notes:
134     See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
135     obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
136 
137     Level: advanced
138 
139 .seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
140          MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()
141 
142 @*/
143 PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
144 {
145   DM_Sliced *slice = (DM_Sliced*)dm->data;
146 
147   PetscFunctionBegin;
148   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
149   slice->d_nz  = d_nz;
150   slice->d_nnz = (PetscInt*)d_nnz;
151   slice->o_nz  = o_nz;
152   slice->o_nnz = (PetscInt*)o_nnz;
153   PetscFunctionReturn(0);
154 }
155 
156 #undef __FUNCT__
157 #define __FUNCT__ "DMSlicedSetBlockFills_Private"
158 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
159 {
160   PetscErrorCode     ierr;
161   PetscInt           i,j,nz,*fi,*fj;
162   DMSlicedBlockFills *f;
163 
164   PetscFunctionBegin;
165   PetscValidPointer(inf,3);
166   if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
167   if (!fill) PetscFunctionReturn(0);
168   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
169   ierr  = PetscMalloc3(1,&f,bs+1,&fi,nz,&fj);CHKERRQ(ierr);
170   f->bs = bs;
171   f->nz = nz;
172   f->i  = fi;
173   f->j  = fj;
174   for (i=0,nz=0; i<bs; i++) {
175     fi[i] = nz;
176     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
177   }
178   fi[i] = nz;
179   *inf  = f;
180   PetscFunctionReturn(0);
181 }
182 
183 #undef __FUNCT__
184 #define __FUNCT__ "DMSlicedSetBlockFills"
185 /*@C
186     DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
187     of the matrix returned by DMSlicedGetMatrix().
188 
189     Logically Collective on DM
190 
191     Input Parameter:
192 +   sliced - the DM object
193 .   dfill - the fill pattern in the diagonal block (may be NULL, means use dense block)
194 -   ofill - the fill pattern in the off-diagonal blocks
195 
196     Notes:
197     This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
198     See DMDASetBlockFills() for example usage.
199 
200     Level: advanced
201 
202 .seealso DMSlicedGetMatrix(), DMDASetBlockFills()
203 @*/
204 PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
205 {
206   DM_Sliced      *slice = (DM_Sliced*)dm->data;
207   PetscErrorCode ierr;
208 
209   PetscFunctionBegin;
210   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
211   ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
212   ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
213   PetscFunctionReturn(0);
214 }
215 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
/* Release all memory owned by the sliced implementation: the ghost list, the optional
   fill patterns, and the implementation struct itself. */
static PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* Each fill struct and its arrays came from one PetscMalloc3() with the struct as the
     base pointer, so PetscFree3() receives the arguments in that same order */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(slice);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "DMCreateGlobalVector_Sliced"
234 static PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
235 {
236   PetscErrorCode ierr;
237   DM_Sliced      *slice = (DM_Sliced*)dm->data;
238 
239   PetscFunctionBegin;
240   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
241   PetscValidPointer(gvec,2);
242   *gvec = 0;
243   ierr  = VecCreateGhostBlock(PetscObjectComm((PetscObject)dm),slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
244   ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
245   PetscFunctionReturn(0);
246 }
247 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Sliced"
/* Start updating the ghost values of l from the owning processes; l must be the
   local form of g obtained with VecGhostGetLocalForm(). */
static PetscErrorCode  DMGlobalToLocalBegin_Sliced(DM da,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
  /* NOTE(review): End is deliberately(?) called before Begin -- presumably to complete
     any ghost update already in flight before starting this one; confirm this ordering */
  ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateBegin(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
262 
263 #undef __FUNCT__
264 #define __FUNCT__ "DMGlobalToLocalEnd_Sliced"
265 static PetscErrorCode  DMGlobalToLocalEnd_Sliced(DM da,Vec g,InsertMode mode,Vec l)
266 {
267   PetscErrorCode ierr;
268   PetscBool      flg;
269 
270   PetscFunctionBegin;
271   ierr = VecGhostIsLocalForm(g,l,&flg);CHKERRQ(ierr);
272   if (!flg) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_ARG_WRONG,"Local vector is not local form of global vector");
273   ierr = VecGhostUpdateEnd(g,mode,SCATTER_FORWARD);CHKERRQ(ierr);
274   PetscFunctionReturn(0);
275 }
276 
/*MC
   DMSLICED = "sliced" - A DM object that is used to manage data for a general graph. Uses VecCreateGhost() ghosted vectors for storing the fields

   See DMSlicedCreate() for details.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMSlicedCreate(), DMCreate()
M*/
286 
287 #undef __FUNCT__
288 #define __FUNCT__ "DMCreate_Sliced"
289 PETSC_EXTERN PetscErrorCode DMCreate_Sliced(DM p)
290 {
291   PetscErrorCode ierr;
292   DM_Sliced      *slice;
293 
294   PetscFunctionBegin;
295   ierr    = PetscNewLog(p,&slice);CHKERRQ(ierr);
296   p->data = slice;
297 
298   ierr = PetscObjectChangeTypeName((PetscObject)p,DMSLICED);CHKERRQ(ierr);
299 
300   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
301   p->ops->creatematrix       = DMCreateMatrix_Sliced;
302   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Sliced;
303   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Sliced;
304   p->ops->destroy            = DMDestroy_Sliced;
305   PetscFunctionReturn(0);
306 }
307 
308 #undef __FUNCT__
309 #define __FUNCT__ "DMSlicedCreate"
310 /*@C
311     DMSlicedCreate - Creates a DM object, used to manage data for a unstructured problem
312 
313     Collective on MPI_Comm
314 
315     Input Parameter:
316 +   comm - the processors that will share the global vector
317 .   bs - the block size
318 .   nlocal - number of vector entries on this process
319 .   Nghosts - number of ghost points needed on this process
320 .   ghosts - global indices of all ghost points for this process
321 .   d_nnz - matrix preallocation information representing coupling within this process
322 -   o_nnz - matrix preallocation information representing coupling between this process and other processes
323 
324     Output Parameters:
325 .   slice - the slice object
326 
327     Notes:
328         This DM does not support DMCreateLocalVector(), DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead one directly uses
329         VecGhostGetLocalForm() and VecGhostRestoreLocalForm() to access the local representation and VecGhostUpdateBegin() and VecGhostUpdateEnd() to update
330         the ghost points.
331 
332         One can use DMGlobalToLocalBegin(), and DMGlobalToLocalEnd() instead of VecGhostUpdateBegin() and VecGhostUpdateEnd().
333 
334     Level: advanced
335 
336 .seealso DMDestroy(), DMCreateGlobalVector(), DMSetType(), DMSLICED, DMSlicedSetGhosts(), DMSlicedSetPreallocation(), VecGhostUpdateBegin(), VecGhostUpdateEnd(),
337          VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
338 
339 @*/
340 PetscErrorCode  DMSlicedCreate(MPI_Comm comm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[], const PetscInt d_nnz[],const PetscInt o_nnz[],DM *dm)
341 {
342   PetscErrorCode ierr;
343 
344   PetscFunctionBegin;
345   PetscValidPointer(dm,2);
346   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
347   ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
348   ierr = DMSlicedSetGhosts(*dm,bs,nlocal,Nghosts,ghosts);CHKERRQ(ierr);
349   if (d_nnz) {
350     ierr = DMSlicedSetPreallocation(*dm,0, d_nnz,0,o_nnz);CHKERRQ(ierr);
351   }
352   PetscFunctionReturn(0);
353 }
354 
355