xref: /petsc/src/mat/impls/is/matis.c (revision c77832ed7683be3617520af8bc25e940b04481a1)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Build the star forest (SF) that connects the locally owned rows of the
   global MATIS matrix B (the SF roots) to the rows of the per-process local
   matrix matis->A (the SF leaves), and allocate the two work arrays used for
   SF communication. Used to exchange per-row data (e.g. preallocation counts)
   between the assembled and unassembled representations. */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);   /* leaves: rows of the local matrix */
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);      /* roots: locally owned global rows */
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the row l2g map gives, for each local row, the global row it maps to */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* persistent work arrays reused by every SF broadcast/reduce on this matrix */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
41 /*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
69           from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* Dispatch to the type-specific implementation (MatISSetPreallocation_IS);
     PetscTryMethod is a no-op when B has no such method composed, so this is
     safe to call on matrices of any type. */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* MATIS implementation of MatISSetPreallocation: translate MPIAIJ-style
   per-global-row counts (d_nz/d_nnz + o_nz/o_nnz) into (over)estimates for
   the local matrix, and preallocate it for the AIJ, BAIJ and SBAIJ formats. */
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* rootdata[i] = total (diagonal + off-diagonal) estimate for owned global row i */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* broadcast the counts from owned global rows to local rows; the local size
     and block size queries are interleaved to overlap with the communication */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress scalar-row counts into block-row counts for BAIJ, reusing
     sf_leafdata in place (safe: write index i never overtakes read index i*bs) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract i as an estimate of the
     dropped lower-triangular block columns.
     NOTE(review): sf_leafdata[i]-i can go negative when the row estimate is
     smaller than i — presumably tolerated by the preallocation routine as a
     rough estimate, but worth confirming. */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
128 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Compute and set the preallocation of the assembled (MPI) matrix B from the
   unassembled MATIS matrix A. Each process counts, per local row, the entries
   that fall in the diagonal vs off-diagonal block of the owning process; the
   counts are then combined across subdomains through the SF (summed, or
   maxed when maxreduce is true). The result generally overestimates. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  /* row (and, if different, column) local-to-global index arrays */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges, used to decide
     whether an entry lands in the diagonal or off-diagonal block */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i=0;i<local_rows;i++) {
      PetscInt owner = row_ownership[global_indices_r[i]];
      for (j=0;j<local_cols;j++) {
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
      }
    }
  } else { /* TODO: this could be optimized using MatGetRowIJ */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols: for SBAIJ only the
           upper triangle is traversed, so count the mirrored entry too */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation for all three possible assembled formats; dnz/onz are
     compressed in place from scalar-row to block-row counts for (S)BAIJ */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
262 
263 #undef __FUNCT__
264 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
265 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
266 {
267   Mat_IS         *matis = (Mat_IS*)(mat->data);
268   Mat            local_mat;
269   /* info on mat */
270   PetscInt       bs,rows,cols,lrows,lcols;
271   PetscInt       local_rows,local_cols;
272   PetscBool      isdense,issbaij,isseqaij;
273   PetscMPIInt    nsubdomains;
274   /* values insertion */
275   PetscScalar    *array;
276   /* work */
277   PetscErrorCode ierr;
278 
279   PetscFunctionBegin;
280   /* get info from mat */
281   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
282   if (nsubdomains == 1) {
283     if (reuse == MAT_INITIAL_MATRIX) {
284       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
285     } else {
286       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
287     }
288     PetscFunctionReturn(0);
289   }
290   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
291   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
292   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
293   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
294   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
295   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
296   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
297 
298   if (reuse == MAT_INITIAL_MATRIX) {
299     MatType     new_mat_type;
300     PetscBool   issbaij_red;
301 
302     /* determining new matrix type */
303     ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
304     if (issbaij_red) {
305       new_mat_type = MATSBAIJ;
306     } else {
307       if (bs>1) {
308         new_mat_type = MATBAIJ;
309       } else {
310         new_mat_type = MATAIJ;
311       }
312     }
313 
314     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
315     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
316     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
317     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
318     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
319   } else {
320     PetscInt mbs,mrows,mcols,mlrows,mlcols;
321     /* some checks */
322     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
323     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
324     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
325     if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
326     if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
327     if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
328     if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
329     if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
330     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
331   }
332 
333   if (issbaij) {
334     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
335   } else {
336     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
337     local_mat = matis->A;
338   }
339 
340   /* Set values */
341   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
342   if (isdense) { /* special case for dense local matrices */
343     PetscInt i,*dummy;
344 
345     ierr = PetscMalloc1(PetscMax(local_rows,local_cols),&dummy);CHKERRQ(ierr);
346     for (i=0;i<PetscMax(local_rows,local_cols);i++) dummy[i] = i;
347     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
348     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
349     ierr = MatSetValuesLocal(*M,local_rows,dummy,local_cols,dummy,array,ADD_VALUES);CHKERRQ(ierr);
350     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
351     ierr = PetscFree(dummy);CHKERRQ(ierr);
352   } else if (isseqaij) {
353     PetscInt  i,nvtxs,*xadj,*adjncy;
354     PetscBool done;
355 
356     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
357     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
358     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
359     for (i=0;i<nvtxs;i++) {
360       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
361     }
362     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
363     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
364     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
365   } else { /* very basic values insertion for all other matrix types */
366     PetscInt i;
367 
368     for (i=0;i<local_rows;i++) {
369       PetscInt       j;
370       const PetscInt *local_indices_cols;
371 
372       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
373       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
374       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
375     }
376   }
377   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
378   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
379   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
380   if (isdense) {
381     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
382   }
383   PetscFunctionReturn(0);
384 }
385 
386 #undef __FUNCT__
387 #define __FUNCT__ "MatISGetMPIXAIJ"
388 /*@
389     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
390 
391   Input Parameters:
392 +  mat - the matrix (should be of type MATIS)
393 -  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
394 
395   Output Parameter:
396 .  newmat - the matrix in AIJ format
397 
398   Level: developer
399 
400   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
401 
402 .seealso: MATIS
403 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* with MAT_REUSE_MATRIX, *newmat must already be a valid matrix on the
     same communicator, distinct from mat */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the MATIS implementation (MatISGetMPIXAIJ_IS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
420 
421 #undef __FUNCT__
422 #define __FUNCT__ "MatDuplicate_IS"
423 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
424 {
425   PetscErrorCode ierr;
426   Mat_IS         *matis = (Mat_IS*)(mat->data);
427   PetscInt       bs,m,n,M,N;
428   Mat            B,localmat;
429 
430   PetscFunctionBegin;
431   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
432   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
433   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
434   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
435   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
436   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
437   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
438   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
439   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
440   *newmat = B;
441   PetscFunctionReturn(0);
442 }
443 
444 #undef __FUNCT__
445 #define __FUNCT__ "MatIsHermitian_IS"
446 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
447 {
448   PetscErrorCode ierr;
449   Mat_IS         *matis = (Mat_IS*)A->data;
450   PetscBool      local_sym;
451 
452   PetscFunctionBegin;
453   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
454   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
455   PetscFunctionReturn(0);
456 }
457 
458 #undef __FUNCT__
459 #define __FUNCT__ "MatIsSymmetric_IS"
460 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
461 {
462   PetscErrorCode ierr;
463   Mat_IS         *matis = (Mat_IS*)A->data;
464   PetscBool      local_sym;
465 
466   PetscFunctionBegin;
467   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
468   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
469   PetscFunctionReturn(0);
470 }
471 
472 #undef __FUNCT__
473 #define __FUNCT__ "MatDestroy_IS"
474 PetscErrorCode MatDestroy_IS(Mat A)
475 {
476   PetscErrorCode ierr;
477   Mat_IS         *b = (Mat_IS*)A->data;
478 
479   PetscFunctionBegin;
480   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
481   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
482   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
483   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
484   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
485   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
486   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
487   ierr = PetscFree(A->data);CHKERRQ(ierr);
488   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
489   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
490   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
491   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
492   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatMult_IS"
498 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *is  = (Mat_IS*)A->data;
502   PetscScalar    zero = 0.0;
503 
504   PetscFunctionBegin;
505   /*  scatter the global vector x into the local work vector */
506   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
507   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
508 
509   /* multiply the local matrix */
510   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
511 
512   /* scatter product back into global memory */
513   ierr = VecSet(y,zero);CHKERRQ(ierr);
514   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
515   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
516   PetscFunctionReturn(0);
517 }
518 
519 #undef __FUNCT__
520 #define __FUNCT__ "MatMultAdd_IS"
521 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
522 {
523   Vec            temp_vec;
524   PetscErrorCode ierr;
525 
526   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
527   if (v3 != v2) {
528     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
529     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
530   } else {
531     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
532     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
533     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
534     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
535     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
536   }
537   PetscFunctionReturn(0);
538 }
539 
540 #undef __FUNCT__
541 #define __FUNCT__ "MatMultTranspose_IS"
542 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
543 {
544   Mat_IS         *is = (Mat_IS*)A->data;
545   PetscErrorCode ierr;
546 
547   PetscFunctionBegin;
548   /*  scatter the global vector x into the local work vector */
549   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
550   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
551 
552   /* multiply the local matrix */
553   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
554 
555   /* scatter product back into global vector */
556   ierr = VecSet(x,0);CHKERRQ(ierr);
557   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
558   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
561 
562 #undef __FUNCT__
563 #define __FUNCT__ "MatMultTransposeAdd_IS"
564 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
565 {
566   Vec            temp_vec;
567   PetscErrorCode ierr;
568 
569   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
570   if (v3 != v2) {
571     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
572     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
573   } else {
574     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
575     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
576     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
577     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
578     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
579   }
580   PetscFunctionReturn(0);
581 }
582 
583 #undef __FUNCT__
584 #define __FUNCT__ "MatView_IS"
585 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
586 {
587   Mat_IS         *a = (Mat_IS*)A->data;
588   PetscErrorCode ierr;
589   PetscViewer    sviewer;
590 
591   PetscFunctionBegin;
592   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
593   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
594   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
595   PetscFunctionReturn(0);
596 }
597 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Install the row/column local-to-global mappings on a MATIS matrix: tears
   down any previously built objects, creates the sequential local matrix
   sized by the mappings, the local work vectors, and the global-to-local
   scatters used by the MatMult routines. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the mapping sizes, with
     a "-is_" options prefix so users can configure it from the command line */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(is->A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(is->A->cmap);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: rctx for rows, cctx for columns.
     When the two mappings coincide, cctx simply references rctx. */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
665 
666 #undef __FUNCT__
667 #define __FUNCT__ "MatSetValues_IS"
668 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
669 {
670   Mat_IS         *is = (Mat_IS*)mat->data;
671   PetscInt       rows_l[2048],cols_l[2048];
672   PetscErrorCode ierr;
673 
674   PetscFunctionBegin;
675 #if defined(PETSC_USE_DEBUG)
676   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
677 #endif
678   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
679   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
680   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
681   PetscFunctionReturn(0);
682 }
683 
684 #undef __FUNCT__
685 #define __FUNCT__ "MatSetValuesLocal_IS"
686 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
687 {
688   PetscErrorCode ierr;
689   Mat_IS         *is = (Mat_IS*)A->data;
690 
691   PetscFunctionBegin;
692   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
693   PetscFunctionReturn(0);
694 }
695 
696 #undef __FUNCT__
697 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
698 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
699 {
700   PetscErrorCode ierr;
701   Mat_IS         *is = (Mat_IS*)A->data;
702 
703   PetscFunctionBegin;
704   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
705   PetscFunctionReturn(0);
706 }
707 
708 #undef __FUNCT__
709 #define __FUNCT__ "MatZeroRows_IS"
710 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
711 {
712   PetscInt       n_l = 0, *rows_l = NULL;
713   PetscErrorCode ierr;
714 
715   PetscFunctionBegin;
716   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
717   if (n) {
718     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
719     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
720   }
721   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
722   ierr = PetscFree(rows_l);CHKERRQ(ierr);
723   PetscFunctionReturn(0);
724 }
725 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/* Zero rows of the subdomain matrix given in local numbering. A dof on the
   interface is shared by several subdomains, so each subdomain sets its
   diagonal entry to diag divided by the dof multiplicity; the implicitly
   assembled operator then carries exactly diag on the zeroed rows.
   Fixing up the solution/rhs pair (x,b) is not supported. */
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector": each subdomain contributes 1 per
       local dof (reverse scatter with ADD_VALUES), and the forward scatter
       brings the summed count -- the dof multiplicity -- back into is->y.
       This is needed for MatMult_IS to work properly on interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows, then put diag/multiplicity on the diagonal; the array of
       is->y (the multiplicities computed above) is read inside the loop */
    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
768 
769 #undef __FUNCT__
770 #define __FUNCT__ "MatAssemblyBegin_IS"
771 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
772 {
773   Mat_IS         *is = (Mat_IS*)A->data;
774   PetscErrorCode ierr;
775 
776   PetscFunctionBegin;
777   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
778   PetscFunctionReturn(0);
779 }
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "MatAssemblyEnd_IS"
783 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
784 {
785   Mat_IS         *is = (Mat_IS*)A->data;
786   PetscErrorCode ierr;
787 
788   PetscFunctionBegin;
789   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
790   PetscFunctionReturn(0);
791 }
792 
793 #undef __FUNCT__
794 #define __FUNCT__ "MatISGetLocalMat_IS"
795 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
796 {
797   Mat_IS *is = (Mat_IS*)mat->data;
798 
799   PetscFunctionBegin;
800   *local = is->A;
801   PetscFunctionReturn(0);
802 }
803 
804 #undef __FUNCT__
805 #define __FUNCT__ "MatISGetLocalMat"
806 /*@
807     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
808 
809   Input Parameter:
810 .  mat - the matrix
811 
812   Output Parameter:
813 .  local - the local matrix
814 
815   Level: advanced
816 
817   Notes:
818     This can be called if you have precomputed the nonzero structure of the
819   matrix and want to provide it to the inner matrix object to improve the performance
820   of the MatSetValues() operation.
821 
822   This function does not increase the reference count for the local Mat.  Do not destroy it and do not attempt to use
823   your reference after destroying the parent mat.
824 
825 .seealso: MATIS
826 @*/
827 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
828 {
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
833   PetscValidPointer(local,2);
834   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "MatISSetLocalMat_IS"
840 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
841 {
842   Mat_IS         *is = (Mat_IS*)mat->data;
843   PetscInt       nrows,ncols,orows,ocols;
844   PetscErrorCode ierr;
845 
846   PetscFunctionBegin;
847   if (is->A) {
848     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
849     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
850     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
851   }
852   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
853   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
854   is->A = local;
855   PetscFunctionReturn(0);
856 }
857 
858 #undef __FUNCT__
859 #define __FUNCT__ "MatISSetLocalMat"
860 /*@
861     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
862 
  Input Parameters:
+  mat - the matrix
-  local - the local matrix
866 
867   Output Parameter:
868 
869   Level: advanced
870 
871   Notes:
872     This can be called if you have precomputed the local matrix and
873   want to provide it to the matrix object MATIS.
874 
875 .seealso: MATIS
876 @*/
877 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
878 {
879   PetscErrorCode ierr;
880 
881   PetscFunctionBegin;
882   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
883   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
884   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
885   PetscFunctionReturn(0);
886 }
887 
888 #undef __FUNCT__
889 #define __FUNCT__ "MatZeroEntries_IS"
890 PetscErrorCode MatZeroEntries_IS(Mat A)
891 {
892   Mat_IS         *a = (Mat_IS*)A->data;
893   PetscErrorCode ierr;
894 
895   PetscFunctionBegin;
896   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "MatScale_IS"
902 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
903 {
904   Mat_IS         *is = (Mat_IS*)A->data;
905   PetscErrorCode ierr;
906 
907   PetscFunctionBegin;
908   ierr = MatScale(is->A,a);CHKERRQ(ierr);
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "MatGetDiagonal_IS"
914 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
915 {
916   Mat_IS         *is = (Mat_IS*)A->data;
917   PetscErrorCode ierr;
918 
919   PetscFunctionBegin;
920   /* get diagonal of the local matrix */
921   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
922 
923   /* scatter diagonal back into global vector */
924   ierr = VecSet(v,0);CHKERRQ(ierr);
925   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
926   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "MatSetOption_IS"
932 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
933 {
934   Mat_IS         *a = (Mat_IS*)A->data;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
939   PetscFunctionReturn(0);
940 }
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "MatCreateIS"
944 /*@
    MatCreateIS - Creates a "process" unassembled matrix: it is assembled on each
       process but not across processes.
947 
948    Input Parameters:
949 +     comm    - MPI communicator that will share the matrix
950 .     bs      - block size of the matrix
951 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
952 .     rmap    - local to global map for rows
953 -     cmap    - local to global map for cols
954 
955    Output Parameter:
956 .    A - the resulting matrix
957 
958    Level: advanced
959 
960    Notes: See MATIS for more details
961           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
962           by that process. The sizes of rmap and cmap define the size of the local matrices.
          If either rmap or cmap is NULL, then the matrix is assumed to be square
964 
965 .seealso: MATIS, MatSetLocalToGlobalMapping()
966 @*/
967 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
968 {
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
973   ierr = MatCreate(comm,A);CHKERRQ(ierr);
974   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
975   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
976   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
977   ierr = MatSetUp(*A);CHKERRQ(ierr);
978   if (rmap && cmap) {
979     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
980   } else if (!rmap) {
981     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
982   } else {
983     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
984   }
985   PetscFunctionReturn(0);
986 }
987 
988 /*MC
989    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
990    This stores the matrices in globally unassembled form. Each processor
991    assembles only its local Neumann problem and the parallel matrix vector
992    product is handled "implicitly".
993 
994    Operations Provided:
995 +  MatMult()
996 .  MatMultAdd()
997 .  MatMultTranspose()
998 .  MatMultTransposeAdd()
999 .  MatZeroEntries()
1000 .  MatSetOption()
1001 .  MatZeroRows()
1002 .  MatZeroRowsLocal()
1003 .  MatSetValues()
1004 .  MatSetValuesLocal()
1005 .  MatScale()
1006 .  MatGetDiagonal()
1007 -  MatSetLocalToGlobalMapping()
1008 
1009    Options Database Keys:
1010 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1011 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1013 
1014           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1015 
1016           You can do matrix preallocation on the local matrix after you obtain it with
1017           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1018 
1019   Level: advanced
1020 
1021 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1022 
1023 M*/
1024 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for MATIS: allocates the private Mat_IS structure, installs the
   operations this class implements, and composes the MATIS-specific
   user-callable functions on the object. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: start from a zeroed table, then fill in only what MATIS supports */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions, reached through PetscUseMethod() by the public
     MatISGetLocalMat()/MatISSetLocalMat()/etc. wrappers */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1068