xref: /petsc/src/dm/impls/sliced/sliced.c (revision 3c48a1e8da19189ff2402a4e41a2fc082d52c349)
1 #include "petscdmsliced.h"      /*I      "petscdmsliced.h"     I*/
2 #include "petscmat.h"           /*I      "petscmat.h"    I*/
#include "private/dmimpl.h"     /*I      "private/dmimpl.h"     I*/
4 
5 /* CSR storage of the nonzero structure of a bs*bs matrix */
/* CSR storage of the nonzero structure of a bs*bs matrix */
typedef struct {
  PetscInt bs,nz,*i,*j;   /* block size; nonzero count; CSR row offsets (length bs+1) and column indices (length nz) */
} DMSlicedBlockFills;
9 
/* Implementation-private data for the DMSLICED type */
typedef struct  {
  Vec                globalvector;            /* cached global vector, created lazily by DMCreateGlobalVector_Sliced() */
  PetscInt           bs,n,N,Nghosts,*ghosts;  /* block size; owned blocks; N presumably global blocks (never set in this file — verify); ghost blocks; global indices of ghost blocks (owned copy) */
  PetscInt           d_nz,o_nz,*d_nnz,*o_nnz; /* preallocation info; the arrays point at caller-owned storage (see DMSlicedSetPreallocation) */
  DMSlicedBlockFills *dfill,*ofill;           /* optional intra-block fill patterns for diagonal/off-diagonal blocks */
} DM_Sliced;
16 
#undef __FUNCT__
#define __FUNCT__ "DMGetMatrix_Sliced"
/* Create and preallocate a matrix of the requested type for this DMSliced.
   Block (BAIJ) preallocation is applied directly; for scalar (AIJ) formats the
   per-block-row counts are expanded to per-scalar-row counts, honoring any
   fill pattern set with DMSlicedSetBlockFills().  Scalar and block
   local-to-global mappings are attached so MatSetValuesLocal() can be used. */
PetscErrorCode  DMGetMatrix_Sliced(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;
  DM_Sliced              *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* These only take effect if *J is actually of the corresponding type */
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  /* Probe for the AIJ preallocation routine (MPI first, then Seq); its
     presence identifies *J as a scalar-format matrix. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      /* Scalar rows and block rows coincide: use the counts as given */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      /* Only constant per-block-row counts were given: scale them by bs */
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting DMSlicedSetBlockFills() if applicable */
      /* so_nnz gets size 0 when no o_nnz was supplied; the if (so_nnz) guard
         below assumes a zero-size PetscMalloc2 segment yields NULL — verify. */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        /* Scalar row i is component i%bs of block row i/bs.  Each of the
           (d_nnz-1) coupled non-diagonal blocks contributes one row of the
           off-diagonal fill (or bs entries when no fill was set); the
           diagonal block contributes one row of the diagonal fill. */
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  /* Owned scalar dofs are globally contiguous starting at rstart */
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;
  }
  /* Ghost scalar dof i is component i%bs of ghost block i/bs */
  for (i=0; i<slice->Nghosts*bs; i++) {
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  /* PETSC_OWN_POINTER: the mapping takes ownership of globals, so it is not freed here */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,PETSC_OWN_POINTER,&lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap,blmap);CHKERRQ(ierr);
  /* Drop our references; presumably the matrix holds its own — verify */
  ierr = ISLocalToGlobalMappingDestroy(lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
82 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetGhosts"
/*@C
    DMSlicedSetGhosts - Sets the global indices of other processes elements that will
      be ghosts on this process

    Not Collective

    Input Parameters:
+    dm - the DM object
.    bs - block size
.    nlocal - number of local (owned, non-ghost) blocks
.    Nghosts - number of ghost blocks on this process
-    ghosts - global indices of each ghost block (copied; the caller keeps ownership of the array)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode  DMSlicedSetGhosts(DM dm,PetscInt bs,PetscInt nlocal,PetscInt Nghosts,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Replace any previously set ghost list with a private copy of the new one */
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMalloc(Nghosts*sizeof(PetscInt),&slice->ghosts);CHKERRQ(ierr);
  ierr = PetscMemcpy(slice->ghosts,ghosts,Nghosts*sizeof(PetscInt));CHKERRQ(ierr);
  slice->bs      = bs;
  slice->n       = nlocal;
  slice->Nghosts = Nghosts;
  PetscFunctionReturn(0);
}
118 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetPreallocation"
/*@C
    DMSlicedSetPreallocation - sets the matrix memory preallocation for matrices computed by DMSliced

    Not Collective

    Input Parameters:
+    dm - the DM object
.    d_nz  - number of block nonzeros per block row in diagonal portion of local
           submatrix  (same for all local rows)
.    d_nnz - array containing the number of block nonzeros in the various block rows
           of the in diagonal portion of the local (possibly different for each block
           row) or PETSC_NULL.  You must leave room for the diagonal entry even if it is zero.
.    o_nz  - number of block nonzeros per block row in the off-diagonal portion of local
           submatrix (same for all local rows).
-    o_nnz - array containing the number of nonzeros in the various block rows of the
           off-diagonal portion of the local submatrix (possibly different for
           each block row) or PETSC_NULL.

    Notes:
    See MatMPIBAIJSetPreallocation() for more details on preallocation.  If a scalar matrix (AIJ) is
    obtained with DMSlicedGetMatrix(), the correct preallocation will be set, respecting DMSlicedSetBlockFills().

    The d_nnz and o_nnz arrays are stored by reference, not copied; they must
    remain valid until the matrix has been created.

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices(), MatMPIAIJSetPreallocation(),
         MatMPIBAIJSetPreallocation(), DMSlicedGetMatrix(), DMSlicedSetBlockFills()

@*/
PetscErrorCode  DMSlicedSetPreallocation(DM dm,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  DM_Sliced *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the caller's arrays by pointer (const is cast away but they are
     only read, in DMGetMatrix_Sliced); no copy is made. */
  slice->d_nz  = d_nz;
  slice->d_nnz = (PetscInt*)d_nnz;
  slice->o_nz  = o_nz;
  slice->o_nnz = (PetscInt*)o_nnz;
  PetscFunctionReturn(0);
}
161 
162 #undef __FUNCT__
163 #define __FUNCT__ "DMSlicedSetBlockFills_Private"
164 static PetscErrorCode DMSlicedSetBlockFills_Private(PetscInt bs,const PetscInt *fill,DMSlicedBlockFills **inf)
165 {
166   PetscErrorCode     ierr;
167   PetscInt           i,j,nz,*fi,*fj;
168   DMSlicedBlockFills *f;
169 
170   PetscFunctionBegin;
171   PetscValidPointer(inf,3);
172   if (*inf) {ierr = PetscFree3((*inf)->i,(*inf)->j,*inf);CHKERRQ(ierr);}
173   if (!fill) PetscFunctionReturn(0);
174   for (i=0,nz=0; i<bs*bs; i++) if (fill[i]) nz++;
175   ierr = PetscMalloc3(1,DMSlicedBlockFills,&f,bs+1,PetscInt,&fi,nz,PetscInt,&fj);CHKERRQ(ierr);
176   f->bs = bs;
177   f->nz = nz;
178   f->i  = fi;
179   f->j  = fj;
180   for (i=0,nz=0; i<bs; i++) {
181     fi[i] = nz;
182     for (j=0; j<bs; j++) if (fill[i*bs+j]) fj[nz++] = j;
183   }
184   fi[i] = nz;
185   *inf = f;
186   PetscFunctionReturn(0);
187 }
188 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedSetBlockFills"
/*@C
    DMSlicedSetBlockFills - Sets the fill pattern in each block for a multi-component problem
    of the matrix returned by DMSlicedGetMatrix().

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
.   dfill - the fill pattern in the diagonal block (may be PETSC_NULL, means use dense block)
-   ofill - the fill pattern in the off-diagonal blocks (may be PETSC_NULL, means use dense blocks)

    Notes:
    This only makes sense for multicomponent problems using scalar matrix formats (AIJ).
    See DMDASetBlockFills() for example usage.

    The block size must already have been set with DMSlicedSetGhosts(), since
    the fill patterns are interpreted as bs*bs dense arrays.

    Level: advanced

.seealso DMSlicedGetMatrix(), DMDASetBlockFills()
@*/
PetscErrorCode  DMSlicedSetBlockFills(DM dm,const PetscInt *dfill,const PetscInt *ofill)
{
  DM_Sliced      *slice = (DM_Sliced*)dm->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,dfill,&slice->dfill);CHKERRQ(ierr);
  ierr = DMSlicedSetBlockFills_Private(slice->bs,ofill,&slice->ofill);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
221 
extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);   /* shared DM teardown; done indicates whether we should free our data */

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Sliced"
/* Destroy hook for DMSLICED: releases the cached global vector, the ghost
   list, the fill patterns, and finally the DM itself. */
PetscErrorCode  DMDestroy_Sliced(DM dm)
{
  PetscErrorCode ierr;
  PetscBool      done;
  DM_Sliced      *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  ierr = DMDestroy_Private(dm,&done);CHKERRQ(ierr);
  /* presumably other references remain when !done — nothing to free yet */
  if (!done) PetscFunctionReturn(0);

  if (slice->globalvector) {ierr = VecDestroy(slice->globalvector);CHKERRQ(ierr);}
  ierr = PetscFree(slice->ghosts);CHKERRQ(ierr);
  /* PetscFree3 arguments in the same order as the PetscMalloc3 allocation */
  if (slice->dfill) {ierr = PetscFree3(slice->dfill,slice->dfill->i,slice->dfill->j);CHKERRQ(ierr);}
  if (slice->ofill) {ierr = PetscFree3(slice->ofill,slice->ofill->i,slice->ofill->j);CHKERRQ(ierr);}
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
244 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Sliced"
/* Return a ghosted global vector for this DMSliced.  The first vector created
   is cached; subsequent requests reuse it when no one else holds a reference,
   otherwise a duplicate is returned.  The caller owns a reference in all cases. */
PetscErrorCode  DMCreateGlobalVector_Sliced(DM dm,Vec *gvec)
{
  PetscErrorCode     ierr;
  PetscInt           bs,cnt;
  DM_Sliced          *slice = (DM_Sliced*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  if (slice->globalvector) {
    ierr = PetscObjectGetReference((PetscObject)slice->globalvector,&cnt);CHKERRQ(ierr);
    if (cnt == 1) {             /* Nobody else has a reference so we can just reference it and give it away */
      *gvec = slice->globalvector;
      ierr = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
      ierr = VecZeroEntries(*gvec);CHKERRQ(ierr);
    } else {                    /* Someone else has a reference so we duplicate the global vector */
      ierr = VecDuplicate(slice->globalvector,gvec);CHKERRQ(ierr);
    }
  } else {
    /* First request: create the ghosted vector (owned entries followed by
       Nghosts ghost blocks) and cache it on the DM */
    bs = slice->bs;
    ierr = VecCreateGhostBlock(((PetscObject)dm)->comm,bs,slice->n*bs,PETSC_DETERMINE,slice->Nghosts,slice->ghosts,&slice->globalvector);CHKERRQ(ierr);
    *gvec = slice->globalvector;
    ierr = PetscObjectReference((PetscObject)*gvec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
274 
275 EXTERN_C_BEGIN
276 #undef __FUNCT__
277 #define __FUNCT__ "DMCreate_Sliced"
278 PetscErrorCode  DMCreate_Sliced(DM p)
279 {
280   PetscErrorCode ierr;
281   DM_Sliced      *slice;
282 
283   PetscFunctionBegin;
284   ierr = PetscNewLog(p,DM_Sliced,&slice);CHKERRQ(ierr);
285   p->data = slice;
286 
287   ierr = PetscObjectChangeTypeName((PetscObject)p,"Sliced");CHKERRQ(ierr);
288   p->ops->createglobalvector = DMCreateGlobalVector_Sliced;
289   p->ops->getmatrix          = DMGetMatrix_Sliced;
290   p->ops->destroy            = DMDestroy_Sliced;
291   PetscFunctionReturn(0);
292 }
293 EXTERN_C_END
294 
#undef __FUNCT__
#define __FUNCT__ "DMSlicedCreate"
/*@C
    DMSlicedCreate - Creates a DM object, used to manage data for an unstructured problem

    Collective on MPI_Comm

    Input Parameter:
.   comm - the processors that will share the global vector

    Output Parameters:
.   dm - the slice object

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMSlicedGetGlobalIndices()

@*/
PetscErrorCode  DMSlicedCreate(MPI_Comm comm,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  /* Generic DM creation followed by setting the type, which dispatches to
     DMCreate_Sliced() */
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMSLICED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
323 
324 
325 #undef __FUNCT__
326 #define __FUNCT__ "DMSlicedGetGlobalIndices"
327 /*@C
328     DMSlicedGetGlobalIndices - Gets the global indices for all the local entries
329 
330     Collective on DM
331 
332     Input Parameter:
333 .    slice - the slice object
334 
335     Output Parameters:
336 .    idx - the individual indices for each packed vector/array
337 
338     Level: advanced
339 
340     Notes:
341        The idx parameters should be freed by the calling routine with PetscFree()
342 
343 .seealso DMSlicedDestroy(), DMSlicedCreateGlobalVector(), DMSlicedCreate()
344 
345 @*/
346 PetscErrorCode  DMSlicedGetGlobalIndices(DM dm,PetscInt *idx[])
347 {
348   PetscFunctionReturn(0);
349 }
350 
351 
352 /* Explanation of the missing functions for DMDA-style handling of the local vector:
353 
354    DMSlicedCreateLocalVector()
355    DMSlicedGlobalToLocalBegin()
356    DMSlicedGlobalToLocalEnd()
357 
358  There is no way to get the global form from a local form, so DMSlicedCreateLocalVector() is a memory leak without
359  external accounting for the global vector.  Also, DMSliced intends the user to work with the VecGhost interface since the
360  ghosts are already ordered after the owned entries.  Contrast this to a DMDA where the local vector has a special
361  ordering described by the structured grid, hence it cannot share memory with the global form.  For this reason, users
362  of DMSliced should work with the global vector and use
363 
364    VecGhostGetLocalForm(), VecGhostRestoreLocalForm()
365    VecGhostUpdateBegin(), VecGhostUpdateEnd()
366 
367  rather than the missing DMDA-style functions.  This is conceptually simpler and offers better performance than is
368  possible with the DMDA-style interface.
369 */
370