#include <petscdmsliced.h>      /*I      "petscdmsliced.h" I*/
#include <petscmat.h>           /*I      "petscmat.h"      I*/
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"       I*/

/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;
} DMSlicedBlockFills;

typedef struct  {
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; number of owned, global, and ghost blocks; global indices of ghost blocks */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* block preallocation for the diagonal and off-diagonal portions */
  DMSlicedBlockFills *dfill,*ofill;           /* optional fill patterns within diagonal and off-diagonal blocks */
} DM_Sliced;

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Sliced"
PetscErrorCode  DMCreateMatrix_Sliced(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
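  /* Preallocate both BAIJ flavors up front; each of these routines is a no-op unless *J is actually of the matching
     (Seq or MPI) BAIJ type, so only the call corresponding to mtype takes effect. */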
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices, so we first check whether the
   * matrix is actually AIJ before doing that work. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row; convert it to per scalar-row, respecting DMSlicedSetBlockFills() if applicable */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
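      /* In the diagonal portion, each block row consists of the diagonal block itself (pattern dfill, or dense when no
         fill is set) plus d_nnz-1 off-diagonal blocks (pattern ofill); scalar row i gets the sum of the two per-row
         counts.  The off-diagonal portion uses only the ofill pattern. */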
      for (i=0; i<slice->n*bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
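  /* Owned entries get consecutive global indices starting at this process's first row of *J; ghost block b then
     contributes the scalar indices ghosts[b]*bs .. ghosts[b]*bs+bs-1. */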
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;
  }
  for (i=0; i<slice->Nghosts*bs; i++) {
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetGhosts"
/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes' elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block

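    Notes:
    As an illustration (all sizes here are made up), a process that owns 100 blocks of size 2 and needs read access to
    the two ghost blocks with global block indices 200 and 201 would call
$      DMSlicedSetGhosts(dm,2,100,2,ghosts);
    with ghosts[] = {200,201}.
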
    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector()

@*/
PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetPreallocation"
/*@C
    DMSlicedSetPreallocation - Sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz  - number of block nonzeros per block row in the diagonal portion of the local
           submatrix (same for all local block rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the diagonal portion of the local submatrix (possibly different for each block
           row) or PETSC_NULL
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of the local
           submatrix (same for all local block rows)
-    o_nnz - array containing the number of block nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or PETSC_NULL

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().
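
    As an illustrative example (the numbers are placeholders), to request at most 5 block nonzeros per block row in the
    diagonal portion and 2 in the off-diagonal portion one could call
$      DMSlicedSetPreallocation(dm,5,PETSC_NULL,2,PETSC_NULL);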

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills_Private"
static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
{
  PetscErrorCode     ierr;
  PetscInt           i,j,nz,*fi,*fj;
  DMSlicedBlockFills *f;

  PetscFunctionBegin;
  PetscValidPointer(inf,3);
  if (*inf) {ierr = PetscFree3(*inf,(*inf)->i,(*inf)->j);CHKERRQ(ierr);} /* arguments in the same order as the PetscMalloc3() below */
  if (!fill) PetscFunctionReturn(0);
  for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
  ierr = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
  f->bs = bs;
  f->nz = nz;
  f->i  = fi;
  f->j  = fj;
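  /* Build the CSR structure: fi[r] is the offset of block row r in fj[], and fj[] lists the block columns marked
     nonzero in the dense bs-by-bs fill array. */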
  for (i=0,nz=0; i<bs; i++) {
    fi[i] = nz;
    for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
  }
  fi[i] = nz;
  *inf = f;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills"
/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMSlicedGetMatrix().

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be PETSC_NULL, meaning a dense block)
-   ofill - the fill pattern in the off-diagonal blocks

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.
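
    As a small illustrative sketch (the pattern itself is arbitrary), with bs=3 a diagonal block whose third component
    couples only with itself could be requested by
$      PetscInt dfill[9] = {1, 1, 0,
$                           1, 1, 0,
$                           0, 0, 1};
$      DMSlicedSetBlockFills(dm,dfill,PETSC_NULL);
    Any nonzero value in the array marks the corresponding entry of the block as allocated.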

    Level: advanced

.seealso DMSlicedGetMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Sliced"
PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  PetscErrorCode     ierr;
  DM_Sliced          *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
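  /* The global vector is a VecGhost whose local form holds the n*bs owned entries followed by the Nghosts*bs ghost
     entries, so no separate DM local vector is needed (see the comment at the end of this file). */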
  ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,slice->bs,slice->n*slice->bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,gvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Sliced"
PetscErrorCode  DMCreate_Sliced(DM p)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice;

  PetscFunctionBegin;
  ierr = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);
  p->data = slice;

  ierr = PetscObjectChangeTypeName((PetscObject)p,DMSLICED);CHKERRQ(ierr);
  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->creatematrix       = DMCreateMatrix_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMSlicedCreate"
/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective on MPI_Comm

    Input Parameter:
.   comm - the processors that will share the global vector

    Output Parameter:
.   dm - the newly created sliced DM

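    Notes:
    A typical (illustrative) call sequence, in which the block size, counts, and index arrays come from the application, is
$      DMSlicedCreate(comm,&dm);
$      DMSlicedSetGhosts(dm,bs,nlocal,Nghosts,ghosts);
$      DMSlicedSetPreallocation(dm,d_nz,PETSC_NULL,o_nz,PETSC_NULL);
$      DMCreateGlobalVector(dm,&x);
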
    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector()

@*/
PetscErrorCode  DMSlicedCreate(MPI_Comm comm,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Explanation of the missing functions for DMDA-style handling of the local vector:

   DMSlicedCreateLocalVector()
   DMSlicedGlobalToLocalBegin()
   DMSlicedGlobalToLocalEnd()

 There is no way to get the global form back from a local form, so DMSlicedCreateLocalVector() would be a memory leak
 without external accounting for the global vector.  Also, DMSliced intends the user to work with the VecGhost
 interface, since the ghosts are already ordered after the owned entries.  Contrast this with a DMDA, where the local
 vector has a special ordering described by the structured grid, hence it cannot share memory with the global form.
 For these reasons, users of DMSliced should work with the global vector and use

   VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
   VecGhostUpdateBegin(), VecGhostUpdateEnd()

 rather than the missing DMDA-style functions (a small illustrative sketch of this pattern follows this comment block).
 This is conceptually simpler and offers better performance than is possible with the DMDA-style interface.
*/
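
/* A minimal illustrative sketch of the VecGhost pattern described above, where X is a global vector obtained from
   DMCreateGlobalVector() on a DMSliced (error checking omitted):

     Vec Xlocal;
     VecGhostUpdateBegin(X,INSERT_VALUES,SCATTER_FORWARD);    -- communicate owned values into the ghost entries
     VecGhostUpdateEnd(X,INSERT_VALUES,SCATTER_FORWARD);
     VecGhostGetLocalForm(X,&Xlocal);                         -- Xlocal is the owned entries followed by the ghosts
     ... read or write entries through Xlocal ...
     VecGhostRestoreLocalForm(X,&Xlocal);
*/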