xref: /petsc/src/mat/impls/is/matis.c (revision 00d931fe9835bef04c3bcd2a9a1bf118d64cc4c2)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   MatISComputeSF_Private - Builds the star forest relating the owned rows of the
   global matrix B (roots) to the rows of the local matrix matis->A (leaves,
   addressed through B's row local-to-global mapping), and allocates the
   rootdata/leafdata work arrays used by the preallocation routines.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: one per row of the local (unassembled) matrix */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  /* roots: one per locally owned row of the global matrix */
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* work arrays reused across preallocation calls; freed in MatDestroy_IS */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
41 /*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
76 PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
77 {
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
82   PetscValidType(B,1);
83   ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MatISSetPreallocation_IS - MATIS implementation of MatISSetPreallocation().

   The diagonal and off-diagonal estimates given for the owned rows of B are
   summed into sf_rootdata, broadcast to the rows of the local matrix through
   the SF, clamped by the number of local columns, and used to preallocate
   matis->A. All three Seq(S)(B)AIJ preallocation routines are called; only
   the one matching the actual type of matis->A takes effect.
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total (diagonal + off-diagonal) nonzero estimate for each owned row */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local size/blocksize queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot hold more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* reuse sf_leafdata in place: per-block-row estimate for BAIJ from the first row of each block */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting i looks like a heuristic discount for the (at most i)
     block columns below the diagonal, since SBAIJ stores the upper triangle only — confirm */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
128 
129 #undef __FUNCT__
130 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
131 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
132 {
133   Mat_IS          *matis = (Mat_IS*)(A->data);
134   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
135   const PetscInt  *global_indices_r,*global_indices_c;
136   PetscInt        i,j,bs,rows,cols;
137   PetscInt        lrows,lcols;
138   PetscInt        local_rows,local_cols;
139   PetscMPIInt     nsubdomains;
140   PetscBool       isdense,issbaij;
141   PetscErrorCode  ierr;
142 
143   PetscFunctionBegin;
144   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
145   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
146   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
147   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
148   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
149   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
150   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
151   if (A->rmap->mapping != A->cmap->mapping) {
152     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
153   } else {
154     global_indices_c = global_indices_r;
155   }
156 
157   if (issbaij) {
158     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
159   }
160   /*
161      An SF reduce is needed to sum up properly on shared rows.
162      Note that generally preallocation is not exact, since it overestimates nonzeros
163   */
164   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
165     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
166   }
167   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
168   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
169   /* All processes need to compute entire row ownership */
170   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
171   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
172   for (i=0;i<nsubdomains;i++) {
173     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
174       row_ownership[j] = i;
175     }
176   }
177 
178   /*
179      my_dnz and my_onz contains exact contribution to preallocation from each local mat
180      then, they will be summed up properly. This way, preallocation is always sufficient
181   */
182   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
183   /* preallocation as a MATAIJ */
184   if (isdense) { /* special case for dense local matrices */
185     for (i=0;i<local_rows;i++) {
186       PetscInt index_row = global_indices_r[i];
187       for (j=i;j<local_rows;j++) {
188         PetscInt owner = row_ownership[index_row];
189         PetscInt index_col = global_indices_c[j];
190         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
191           my_dnz[i] += 1;
192         } else { /* offdiag block */
193           my_onz[i] += 1;
194         }
195         /* same as before, interchanging rows and cols */
196         if (i != j) {
197           owner = row_ownership[index_col];
198           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
199             my_dnz[j] += 1;
200           } else {
201             my_onz[j] += 1;
202           }
203         }
204       }
205     }
206   } else { /* TODO: this could be optimized using MatGetRowIJ */
207     for (i=0;i<local_rows;i++) {
208       const PetscInt *cols;
209       PetscInt       ncols,index_row = global_indices_r[i];
210       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
211       for (j=0;j<ncols;j++) {
212         PetscInt owner = row_ownership[index_row];
213         PetscInt index_col = global_indices_c[cols[j]];
214         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
215           my_dnz[i] += 1;
216         } else { /* offdiag block */
217           my_onz[i] += 1;
218         }
219         /* same as before, interchanging rows and cols */
220         if (issbaij && index_col != index_row) {
221           owner = row_ownership[index_col];
222           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
223             my_dnz[cols[j]] += 1;
224           } else {
225             my_onz[cols[j]] += 1;
226           }
227         }
228       }
229       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
230     }
231   }
232   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
233   if (global_indices_c != global_indices_r) {
234     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
235   }
236   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
237 
238   /* Reduce my_dnz and my_onz */
239   if (maxreduce) {
240     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
241     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
242     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
243     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
244   } else {
245     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
246     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
247     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
248     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
249   }
250   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
251 
252   /* Resize preallocation if overestimated */
253   for (i=0;i<lrows;i++) {
254     dnz[i] = PetscMin(dnz[i],lcols);
255     onz[i] = PetscMin(onz[i],cols-lcols);
256   }
257   /* set preallocation */
258   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
259   for (i=0;i<lrows/bs;i++) {
260     dnz[i] = dnz[i*bs]/bs;
261     onz[i] = onz[i*bs]/bs;
262   }
263   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
264   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
265   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
266   if (issbaij) {
267     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
268   }
269   PetscFunctionReturn(0);
270 }
271 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   MatISGetMPIXAIJ_IS - MATIS implementation of MatISGetMPIXAIJ(): assembles the
   unassembled MATIS matrix into a parallel (S)(B)AIJ matrix *M by summing the
   local matrices through MatSetValuesLocal() with ADD_VALUES.

   With MAT_INITIAL_MATRIX the output type is chosen from the local matrices
   (SBAIJ only if every local matrix is SBAIJ) and preallocated via
   MatISSetMPIXAIJPreallocation_Private(); with MAT_REUSE_MATRIX the given *M is
   checked for compatible sizes/block size and zeroed.
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  /* uniprocessor case: the local matrix already is the assembled matrix */
  if (nsubdomains == 1) {
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if all local matrices are SBAIJ */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat in all sizes */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  /* SBAIJ stores the upper triangle only: convert to BAIJ to insert full rows */
  if (issbaij) {
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices: insert the whole array at once */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays; share one array when the local matrix is square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* fast path using the CSR arrays directly */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* overlap assembly with the destruction of the (possibly converted) local matrix */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) { /* restore the default orientation */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
405 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* when reusing, the output matrix must be a valid, distinct object on the same communicator */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
440 
441 #undef __FUNCT__
442 #define __FUNCT__ "MatDuplicate_IS"
443 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
444 {
445   PetscErrorCode ierr;
446   Mat_IS         *matis = (Mat_IS*)(mat->data);
447   PetscInt       bs,m,n,M,N;
448   Mat            B,localmat;
449 
450   PetscFunctionBegin;
451   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
452   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
453   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
454   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
455   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
456   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
457   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
458   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
459   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
460   *newmat = B;
461   PetscFunctionReturn(0);
462 }
463 
464 #undef __FUNCT__
465 #define __FUNCT__ "MatIsHermitian_IS"
466 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
467 {
468   PetscErrorCode ierr;
469   Mat_IS         *matis = (Mat_IS*)A->data;
470   PetscBool      local_sym;
471 
472   PetscFunctionBegin;
473   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
474   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
475   PetscFunctionReturn(0);
476 }
477 
478 #undef __FUNCT__
479 #define __FUNCT__ "MatIsSymmetric_IS"
480 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
481 {
482   PetscErrorCode ierr;
483   Mat_IS         *matis = (Mat_IS*)A->data;
484   PetscBool      local_sym;
485 
486   PetscFunctionBegin;
487   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
488   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
489   PetscFunctionReturn(0);
490 }
491 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/*
   MatDestroy_IS - Releases every object owned by the MATIS data structure
   (local matrix, scatters, work vectors, SF and its work arrays) and removes
   the composed methods registered at creation.
*/
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  /* sf_rootdata/sf_leafdata were allocated together in MatISComputeSF_Private */
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* unregister the composed methods installed by MatCreate_IS */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
515 
516 #undef __FUNCT__
517 #define __FUNCT__ "MatMult_IS"
518 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
519 {
520   PetscErrorCode ierr;
521   Mat_IS         *is  = (Mat_IS*)A->data;
522   PetscScalar    zero = 0.0;
523 
524   PetscFunctionBegin;
525   /*  scatter the global vector x into the local work vector */
526   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
527   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
528 
529   /* multiply the local matrix */
530   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
531 
532   /* scatter product back into global memory */
533   ierr = VecSet(y,zero);CHKERRQ(ierr);
534   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
535   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
536   PetscFunctionReturn(0);
537 }
538 
539 #undef __FUNCT__
540 #define __FUNCT__ "MatMultAdd_IS"
541 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
542 {
543   Vec            temp_vec;
544   PetscErrorCode ierr;
545 
546   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
547   if (v3 != v2) {
548     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
549     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
550   } else {
551     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
552     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
553     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
554     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
555     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
556   }
557   PetscFunctionReturn(0);
558 }
559 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_IS"
/* x = A'*y for a MATIS matrix: gather y through the row scatter, apply the
   local transpose, then sum the contributions into x through the column scatter. */
PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* scatter the global vector y into the local work vector */
  ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* multiply the local matrix */
  ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);

  /* scatter product back into global vector x */
  ierr = VecSet(x,0);CHKERRQ(ierr);
  ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
581 
582 #undef __FUNCT__
583 #define __FUNCT__ "MatMultTransposeAdd_IS"
584 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
585 {
586   Vec            temp_vec;
587   PetscErrorCode ierr;
588 
589   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
590   if (v3 != v2) {
591     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
592     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
593   } else {
594     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
595     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
596     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
597     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
598     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
599   }
600   PetscFunctionReturn(0);
601 }
602 
603 #undef __FUNCT__
604 #define __FUNCT__ "MatView_IS"
605 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
606 {
607   Mat_IS         *a = (Mat_IS*)A->data;
608   PetscErrorCode ierr;
609   PetscViewer    sviewer;
610 
611   PetscFunctionBegin;
612   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
613   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
614   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
615   PetscFunctionReturn(0);
616 }
617 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   MatSetLocalToGlobalMapping_IS - Installs the row/column local-to-global
   mappings on a MATIS matrix. This (re)creates the local matrix is->A (sized
   by the mappings), the local work vectors is->x/is->y, and the global-to-local
   scatters is->rctx/is->cctx used by the multiplication routines. Any state
   from a previous call is destroyed first.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A; its sizes come from the mapping sizes */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is user-customizable through the "is_" options prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local index i <-> global index mapping(i) */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* identical mappings: share the row scatter */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
683 
684 #undef __FUNCT__
685 #define __FUNCT__ "MatSetValues_IS"
686 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
687 {
688   Mat_IS         *is = (Mat_IS*)mat->data;
689   PetscInt       rows_l[2048],cols_l[2048];
690   PetscErrorCode ierr;
691 
692   PetscFunctionBegin;
693 #if defined(PETSC_USE_DEBUG)
694   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
695 #endif
696   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
697   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
698   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
699   PetscFunctionReturn(0);
700 }
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "MatSetValuesLocal_IS"
704 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
705 {
706   PetscErrorCode ierr;
707   Mat_IS         *is = (Mat_IS*)A->data;
708 
709   PetscFunctionBegin;
710   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 #undef __FUNCT__
715 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
716 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
717 {
718   PetscErrorCode ierr;
719   Mat_IS         *is = (Mat_IS*)A->data;
720 
721   PetscFunctionBegin;
722   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "MatZeroRows_IS"
728 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
729 {
730   PetscInt       n_l = 0, *rows_l = NULL;
731   PetscErrorCode ierr;
732 
733   PetscFunctionBegin;
734   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
735   if (n) {
736     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
737     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
738   }
739   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
740   ierr = PetscFree(rows_l);CHKERRQ(ierr);
741   PetscFunctionReturn(0);
742 }
743 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   MatZeroRowsLocal_IS - Zeroes rows of the local matrix (indices in LOCAL
   numbering). Because a dof may be shared by several subdomains, each local
   diagonal is set to diag divided by the dof multiplicity, so that the sum
   of the local contributions over the sharing subdomains equals diag.
   Fixing x/b values is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->y as a "counting vector" (the code operates on is->y, not
       is->x as an older comment stated): scattering a vector of ones in
       reverse with ADD_VALUES and then forward with INSERT_VALUES leaves in
       each entry of is->y the number of subdomains sharing that local dof.
       This is needed for MatMult_IS to work properly on interface nodes.
    */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows to zero on this subdomain: record that it is a pure Neumann problem */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    /* zero the rows, then overwrite each diagonal with diag scaled by the
       inverse multiplicity stored in is->y */
    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
786 
787 #undef __FUNCT__
788 #define __FUNCT__ "MatAssemblyBegin_IS"
789 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
790 {
791   Mat_IS         *is = (Mat_IS*)A->data;
792   PetscErrorCode ierr;
793 
794   PetscFunctionBegin;
795   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
796   PetscFunctionReturn(0);
797 }
798 
799 #undef __FUNCT__
800 #define __FUNCT__ "MatAssemblyEnd_IS"
801 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
802 {
803   Mat_IS         *is = (Mat_IS*)A->data;
804   PetscErrorCode ierr;
805 
806   PetscFunctionBegin;
807   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
808   PetscFunctionReturn(0);
809 }
810 
811 #undef __FUNCT__
812 #define __FUNCT__ "MatISGetLocalMat_IS"
813 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
814 {
815   Mat_IS *is = (Mat_IS*)mat->data;
816 
817   PetscFunctionBegin;
818   *local = is->A;
819   PetscFunctionReturn(0);
820 }
821 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix (a borrowed reference; do not destroy it)

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch through the composed method so non-MATIS types fail cleanly */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
852 
853 #undef __FUNCT__
854 #define __FUNCT__ "MatISSetLocalMat_IS"
855 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
856 {
857   Mat_IS         *is = (Mat_IS*)mat->data;
858   PetscInt       nrows,ncols,orows,ocols;
859   PetscErrorCode ierr;
860 
861   PetscFunctionBegin;
862   if (is->A) {
863     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
864     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
865     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
866   }
867   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
868   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
869   is->A = local;
870   PetscFunctionReturn(0);
871 }
872 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix (the MATIS object takes a new reference to it)

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch through the composed method so non-MATIS types fail cleanly */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
902 
903 #undef __FUNCT__
904 #define __FUNCT__ "MatZeroEntries_IS"
905 PetscErrorCode MatZeroEntries_IS(Mat A)
906 {
907   Mat_IS         *a = (Mat_IS*)A->data;
908   PetscErrorCode ierr;
909 
910   PetscFunctionBegin;
911   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
912   PetscFunctionReturn(0);
913 }
914 
915 #undef __FUNCT__
916 #define __FUNCT__ "MatScale_IS"
917 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
918 {
919   Mat_IS         *is = (Mat_IS*)A->data;
920   PetscErrorCode ierr;
921 
922   PetscFunctionBegin;
923   ierr = MatScale(is->A,a);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "MatGetDiagonal_IS"
929 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
930 {
931   Mat_IS         *is = (Mat_IS*)A->data;
932   PetscErrorCode ierr;
933 
934   PetscFunctionBegin;
935   /* get diagonal of the local matrix */
936   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
937 
938   /* scatter diagonal back into global vector */
939   ierr = VecSet(v,0);CHKERRQ(ierr);
940   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
941   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "MatSetOption_IS"
947 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
948 {
949   Mat_IS         *a = (Mat_IS*)A->data;
950   PetscErrorCode ierr;
951 
952   PetscFunctionBegin;
953   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
954   PetscFunctionReturn(0);
955 }
956 
957 #undef __FUNCT__
958 #define __FUNCT__ "MatCreateIS"
959 /*@
960     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
961        process but not across processes.
962 
963    Input Parameters:
964 +     comm    - MPI communicator that will share the matrix
965 .     bs      - block size of the matrix
966 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
967 .     rmap    - local to global map for rows
968 -     cmap    - local to global map for cols
969 
970    Output Parameter:
971 .    A - the resulting matrix
972 
973    Level: advanced
974 
975    Notes: See MATIS for more details
976           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
977           by that process. The sizes of rmap and cmap define the size of the local matrices.
978           If either rmap or cmap are NULL, than the matrix is assumed to be square
979 
980 .seealso: MATIS, MatSetLocalToGlobalMapping()
981 @*/
982 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
983 {
984   PetscErrorCode ierr;
985 
986   PetscFunctionBegin;
987   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
988   ierr = MatCreate(comm,A);CHKERRQ(ierr);
989   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
990   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
991   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
992   ierr = MatSetUp(*A);CHKERRQ(ierr);
993   if (rmap && cmap) {
994     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
995   } else if (!rmap) {
996     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
997   } else {
998     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
999   }
1000   PetscFunctionReturn(0);
1001 }
1002 
1003 /*MC
1004    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1005    This stores the matrices in globally unassembled form. Each processor
1006    assembles only its local Neumann problem and the parallel matrix vector
1007    product is handled "implicitly".
1008 
1009    Operations Provided:
1010 +  MatMult()
1011 .  MatMultAdd()
1012 .  MatMultTranspose()
1013 .  MatMultTransposeAdd()
1014 .  MatZeroEntries()
1015 .  MatSetOption()
1016 .  MatZeroRows()
1017 .  MatZeroRowsLocal()
1018 .  MatSetValues()
1019 .  MatSetValuesLocal()
1020 .  MatScale()
1021 .  MatGetDiagonal()
1022 -  MatSetLocalToGlobalMapping()
1023 
1024    Options Database Keys:
1025 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1026 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1028 
1029           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1030 
1031           You can do matrix preallocation on the local matrix after you obtain it with
1032           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1033 
1034   Level: advanced
1035 
1036 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1037 
1038 M*/
1039 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   MatCreate_IS - Constructor registered for the MATIS type: allocates the
   type-specific data, installs the MATIS implementations in the ops table,
   and composes the MATIS-specific query/set functions on the object.
   The local matrix, scatters and work vectors are created later, in
   MatSetLocalToGlobalMapping_IS.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops: clear the whole table first, then fill in what MATIS supports */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions, dispatched via PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1083