xref: /petsc/src/dm/impls/sliced/sliced.c (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 
2 #include "petscdm.h"     /*I      "petscdm.h"     I*/
3 #include "petscmat.h"    /*I      "petscmat.h"    I*/
#include "private/dmimpl.h"
5 
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;  /* bs: block size; nz: nonzeros in the block; i: CSR row offsets (length bs+1); j: column indices within the block (length nz) */
} DMSlicedBlockFills;
10 
/* Implementation data for the DMSLICED type, hung off dm->data */
typedef struct  {
  Vec                globalvector;              /* cached global vector handed out by DMCreateGlobalVector_Sliced() */
  PetscInt           bs,n,N,Nghosts,*ghosts;    /* block size; owned block count; global block count; ghost block count; global indices of ghost blocks */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz;   /* per-block-row preallocation for diagonal/off-diagonal parts (see DMSlicedSetPreallocation()) */
  DMSlicedBlockFills *dfill,*ofill;             /* intra-block fill patterns for scalar (AIJ) preallocation; NULL means dense blocks */
} DM_Sliced;
17 
#undef __FUNCT__
#define __FUNCT__ "DMGetMatrix_Sliced"
/* Creates the matrix for this DMSliced: sets sizes and type, preallocates for both
   blocked (BAIJ) and scalar (AIJ) formats, and attaches the scalar and blocked
   local-to-global mappings used with MatSetValuesLocal()/MatSetValuesBlockedLocal(). */
PetscErrorCode  DMGetMatrix_Sliced(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Only the preallocation routine matching the actual matrix type takes effect; the others are no-ops */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar problem: block preallocation is already per scalar row */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Uniform preallocation: scale per-block counts by the block size */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* NOTE(review): with a single-chunk PetscMalloc2 implementation, so_nnz may be
       * non-NULL even when its requested length is 0 (o_nnz not provided) -- the
       * `if (so_nnz)` guard below would then read a NULL slice->o_nnz; confirm against
       * the PetscMalloc2 implementation in use. */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* In the diagonal portion, the d_nnz[i/bs]-1 off-diagonal blocks use the ofill
         * row width while the block on the diagonal uses dfill; NULL pattern = dense (bs) */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;   /* owned entries are contiguous starting at rstart */
  }
  for (i=0; i<slice->Nghosts*bs; i++) {
    /* ghost block g occupies global scalar entries g*bs .. g*bs+bs-1 */
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  /* lmap takes ownership of globals; blmap is the blocked form of the same mapping */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
83 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetGhosts"
/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    slice - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block

    Notes:
    The ghosts array is copied, so the caller may free or reuse it after this call.
    Any previously set ghost list is released.

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);                                  /* drop any previous ghost list */
  ierr = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr); /* copy, do not alias, the caller's array */
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetPreallocation"
/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    slice - the DM object
.    d_nz  - number of block nonzeros per block row in diagonal portion of local
           submatrix  (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the in diagonal portion of the local (possibly different for each block
           row) or PETSC_NULL.  You must leave room for the diagonal entry even if it is zero.
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
           submatrix (same for all local rows).
-    o_nnz - array containing the number of nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or PETSC_NULL.

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    The d_nnz and o_nnz arrays are NOT copied; only the pointers are stored, so the
    arrays must remain valid until the matrix has been created.

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* store pointers only (const cast away); ownership stays with the caller */
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  PetscFunctionReturn(0);
}
162 
163 #undef __FUNCT__
164 #define __FUNCT__ "DMSlicedSetBlockFills_Private"
165 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
166 {
167   PetscErrorCode     ierr;
168   PetscInt           i,j,nz,*fi,*fj;
169   DMSlicedBlockFills *f;
170 
171   PetscFunctionBegin;
172   PetscValidPointer(inf,3);
173   if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
174   if (!fill) PetscFunctionReturn(0);
175   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
176   ierr = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
177   f->bs = bs;
178   f->nz = nz;
179   f->i  = fi;
180   f->j  = fj;
181   for (i=0,nz=0; i<bs; i++) {
182     fi[i] = nz;
183     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
184   }
185   fi[i] = nz;
186   *inf = f;
187   PetscFunctionReturn(0);
188 }
189 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills"
/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMSlicedGetMatrix().

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be PETSC_NULL, means use dense block)
-   ofill - the fill pattern in the off-diagonal blocks

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.

    Call this after DMSlicedSetGhosts(), since the stored block size is used to
    interpret the bs*bs fill arrays.

    Level: advanced

.seealso DMSlicedGetMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
222 
223 extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);
224 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
/* Destroys the DMSliced-specific data.  DMDestroy_Private() manages the reference
   count; we only tear down the object when it reports the last reference is gone. */
PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  PetscBool      done;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = DMDestroy_Private(dm,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);   /* other references remain; nothing to free yet */

  if (slice->globalvector) {ierr = VecDestroy(slice->globalvector);CHKERRQ(ierr);}
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* arguments ordered (struct,i,j) to match the PetscMalloc3() in DMSlicedSetBlockFills_Private() */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
245 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Sliced"
/* Returns a global vector with ghost padding (VecGhost layout) matching the DMSliced.
   A single vector is cached in slice->globalvector: if nobody else holds a reference
   it is zeroed and handed out again, otherwise a duplicate is created. */
PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  PetscErrorCode     ierr;
  PetscInt           bs,cnt;
  DM_Sliced          *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  if (slice->globalvector) {
    ierr = PetscObjectGetReference((PetscObject)slice->globalvector,&cnt);CHKERRQ(ierr);
    if (cnt == 1) {             /* Nobody else has a reference so we can just reference it and give it away */
      *gvec = slice->globalvector;
      ierr = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
      ierr = VecZeroEntries(*gvec);CHKERRQ(ierr);
    } else {                    /* Someone else has a reference so we duplicate the global vector */
      /* NOTE(review): unlike the reuse path above, this path does not zero the entries
         of the returned vector -- confirm whether that asymmetry is intended */
      ierr = VecDuplicate(slice->globalvector,gvec);CHKERRQ(ierr);
    }
  } else {
    /* First request: create the cached ghosted vector and return a new reference to it */
    bs = slice->bs;
    ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,bs,slice->n*bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,&slice->globalvector);CHKERRQ(ierr);
    *gvec = slice->globalvector;
    ierr = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
275 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Sliced"
/* Type constructor for DMSLICED, invoked via DMSetType() (see DMSlicedCreate()):
   allocates the DM_Sliced implementation data and installs the type-specific ops. */
PetscErrorCode  DMCreate_Sliced(DM p)
{
  PetscErrorCode ierr;
  DM_Sliced      *slice;

  PetscFunctionBegin;
  ierr = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);   /* zero-initialized, logged against p */
  p->data = slice;

  ierr = PetscObjectChangeTypeName((PetscObject)p,"Sliced");CHKERRQ(ierr);
  p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
  p->ops->getmatrix          = DMGetMatrix_Sliced;
  p->ops->destroy            = DMDestroy_Sliced;
  PetscFunctionReturn(0);
}
EXTERN_C_END
295 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedCreate"
/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective on MPI_Comm

    Input Parameter:
.   comm - the processors that will share the global vector

    Output Parameters:
.   dm - the sliced DM object

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode  DMSlicedCreate(MPI_Comm comm,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);   /* dispatches to DMCreate_Sliced() */
  PetscFunctionReturn(0);
}
324 
325 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedGetGlobalIndices"
/*@C
    DMSlicedGetGlobalIndices - Gets the global indices for all the local entries

    Collective on DM

    Input Parameter:
.    slice - the slice object

    Output Parameters:
.    idx - the individual indices for each packed vector/array

    Level: advanced

    Notes:
       The idx parameters should be freed by the calling routine with PetscFree()

       Currently UNIMPLEMENTED: this routine returns immediately without setting idx.

.seealso DMSlicedDestroy(), DMSlicedCreateGlobalVector(), DMSlicedCreate()

@*/
PetscErrorCode  DMSlicedGetGlobalIndices(DM dm,PetscInt *idx[])
{
  /* TODO(review): unimplemented stub -- *idx is never set and PetscFunctionBegin is
     missing; callers must not rely on this routine until it is implemented. */
  PetscFunctionReturn(0);
}
351 
352 
353 /* Explanation of the missing functions for DMDA-style handling of the local vector:
354 
355    DMSlicedCreateLocalVector()
356    DMSlicedGlobalToLocalBegin()
357    DMSlicedGlobalToLocalEnd()
358 
359  There is no way to get the global form from a local form, so DMSlicedCreateLocalVector() is a memory leak without
360  external accounting for the global vector.  Also, DMSliced intends the user to work with the VecGhost interface since the
361  ghosts are already ordered after the owned entries.  Contrast this to a DMDA where the local vector has a special
362  ordering described by the structured grid, hence it cannot share memory with the global form.  For this reason, users
363  of DMSliced should work with the global vector and use
364 
365    VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
366    VecGhostUpdateBegin(), VecGhostUpdateEnd()
367 
368  rather than the missing DMDA-style functions.  This is conceptually simpler and offers better performance than is
369  possible with the DMDA-style interface.
370 */
371