xref: /petsc/src/mat/impls/is/matis.c (revision eab890ac70e3f8cf2d8ed246aa467cd84d643ced)
1 
/*
    Creates a matrix class for using the Neumann-Neumann type preconditioners.
    This stores the matrices in globally unassembled form. Each processor
    assembles only its local Neumann problem and the parallel matrix-vector
    product is handled "implicitly".

    We provide:
        MatMult()

    Currently this allows for only one subdomain per processor.

*/
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
19 #undef __FUNCT__
20 #define __FUNCT__ "MatISComputeSF_Private"
21 static PetscErrorCode MatISComputeSF_Private(Mat B)
22 {
23   Mat_IS         *matis = (Mat_IS*)(B->data);
24   const PetscInt *gidxs;
25   PetscErrorCode ierr;
26 
27   PetscFunctionBegin;
28   ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
29   ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
30   ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
31   ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
32   /* PETSC_OWN_POINTER refers to ilocal which is NULL */
33   ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
34   ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
35   ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatISSetPreallocation"
41 /*@
42    MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.
43 
44    Collective on MPI_Comm
45 
46    Input Parameters:
47 +  B - the matrix
48 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
49            (same value is used for all local rows)
50 .  d_nnz - array containing the number of nonzeros in the various rows of the
51            DIAGONAL portion of the local submatrix (possibly different for each row)
52            or NULL, if d_nz is used to specify the nonzero structure.
53            The size of this array is equal to the number of local rows, i.e 'm'.
54            For matrices that will be factored, you must leave room for (and set)
55            the diagonal entry even if it is zero.
56 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
57            submatrix (same value is used for all local rows).
58 -  o_nnz - array containing the number of nonzeros in the various rows of the
59            OFF-DIAGONAL portion of the local submatrix (possibly different for
60            each row) or NULL, if o_nz is used to specify the nonzero
61            structure. The size of this array is equal to the number
62            of local rows, i.e 'm'.
63 
64    If the *_nnz parameter is given then the *_nz parameter is ignored
65 
66    Level: intermediate
67 
68    Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
69           from the asssembled format to the unassembled one. It overestimates the preallocation of MATIS local
70           matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
71 
72 .keywords: matrix
73 
74 .seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
75 @*/
76 PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
77 {
78   PetscErrorCode ierr;
79 
80   PetscFunctionBegin;
81   PetscValidHeaderSpecific(B,MAT_CLASSID,1);
82   PetscValidType(B,1);
83   ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
84   PetscFunctionReturn(0);
85 }
86 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   Implementation of MatISSetPreallocation() for MATIS.
   The per-owned-row (root) estimates d_* + o_* are broadcast through the SF
   to the rows of the local matrix (leaves), then used to preallocate the
   local matrix in AIJ, BAIJ and SBAIJ formats (only the one matching the
   actual local matrix type takes effect; the others are no-ops).
*/
PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) {
    SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  }
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total nonzero estimate per owned row = diagonal part + off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the SF broadcast with the local size/block-size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot contain more nonzeros than there are local columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress to block rows for BAIJ, reusing sf_leafdata in place: for block
     row i take the estimate of its first scalar row (index i*bs >= i, so the
     in-place overwrite never clobbers a value still to be read) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores the upper triangle only: discount block row i by i.
     NOTE(review): this can go negative when a block row's estimate is
     smaller than its index i -- presumably tolerated by
     MatSeqSBAIJSetPreallocation since these are overestimates, but worth
     confirming; a PetscMax(...,0) clamp would be the defensive fix. */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt  *global_indices_r,*global_indices_c;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
153   if (A->rmap->mapping != A->cmap->mapping) {
154     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
155   } else {
156     global_indices_c = global_indices_r;
157   }
158 
159   if (issbaij) {
160     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
161   }
162   /*
163      An SF reduce is needed to sum up properly on shared rows.
164      Note that generally preallocation is not exact, since it overestimates nonzeros
165   */
166   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
167     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
168   }
169   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
170   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
171   /* All processes need to compute entire row ownership */
172   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
173   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
174   for (i=0;i<nsubdomains;i++) {
175     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
176       row_ownership[j] = i;
177     }
178   }
179 
180   /*
181      my_dnz and my_onz contains exact contribution to preallocation from each local mat
182      then, they will be summed up properly. This way, preallocation is always sufficient
183   */
184   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
185   /* preallocation as a MATAIJ */
186   if (isdense) { /* special case for dense local matrices */
187     for (i=0;i<local_rows;i++) {
188       PetscInt index_row = global_indices_r[i];
189       for (j=i;j<local_rows;j++) {
190         PetscInt owner = row_ownership[index_row];
191         PetscInt index_col = global_indices_c[j];
192         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
193           my_dnz[i] += 1;
194         } else { /* offdiag block */
195           my_onz[i] += 1;
196         }
197         /* same as before, interchanging rows and cols */
198         if (i != j) {
199           owner = row_ownership[index_col];
200           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
201             my_dnz[j] += 1;
202           } else {
203             my_onz[j] += 1;
204           }
205         }
206       }
207     }
208   } else { /* TODO: this could be optimized using MatGetRowIJ */
209     for (i=0;i<local_rows;i++) {
210       const PetscInt *cols;
211       PetscInt       ncols,index_row = global_indices_r[i];
212       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
213       for (j=0;j<ncols;j++) {
214         PetscInt owner = row_ownership[index_row];
215         PetscInt index_col = global_indices_c[cols[j]];
216         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
217           my_dnz[i] += 1;
218         } else { /* offdiag block */
219           my_onz[i] += 1;
220         }
221         /* same as before, interchanging rows and cols */
222         if (issbaij && index_col != index_row) {
223           owner = row_ownership[index_col];
224           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
225             my_dnz[cols[j]] += 1;
226           } else {
227             my_onz[cols[j]] += 1;
228           }
229         }
230       }
231       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
232     }
233   }
234   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
235   if (global_indices_c != global_indices_r) {
236     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
237   }
238   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
239 
240   /* Reduce my_dnz and my_onz */
241   if (maxreduce) {
242     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
243     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
244     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
245     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
246   } else {
247     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
248     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
249     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
250     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
251   }
252   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
253 
254   /* Resize preallocation if overestimated */
255   for (i=0;i<lrows;i++) {
256     dnz[i] = PetscMin(dnz[i],lcols);
257     onz[i] = PetscMin(onz[i],cols-lcols);
258   }
259   /* set preallocation */
260   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
261   for (i=0;i<lrows/bs;i++) {
262     dnz[i] = dnz[i*bs]/bs;
263     onz[i] = onz[i*bs]/bs;
264   }
265   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
266   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
267   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
268   if (issbaij) {
269     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
270   }
271   PetscFunctionReturn(0);
272 }
273 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Implementation of MatISGetMPIXAIJ() for MATIS: assembles the unassembled
   operator into a parallel AIJ/BAIJ/SBAIJ matrix *M by adding the entries
   of each local matrix through the local-to-global mappings.
*/
PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* single process: the local matrix already is the assembled operator */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if ALL local matrices are SBAIJ */
    ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the matrix to reuse must match sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    }
    if (mcols != cols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    }
    if (mlrows != lrows) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    }
    if (mlcols != lcols) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    }
    if (mbs != bs) {
      SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    }
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* convert to BAIJ so that full (not just upper-triangular) rows are available */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    /* take a reference so the unified MatDestroy below is correct in both branches */
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays 0..local_rows-1 / 0..local_cols-1 for a single insertion call */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense arrays are column oriented */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: walk the CSR structure directly */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) {
      SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
    }
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* overlap assembly communication with the destruction of the work matrix */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore the default orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
421 
422 #undef __FUNCT__
423 #define __FUNCT__ "MatISGetMPIXAIJ"
424 /*@
425     MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format
426 
427   Input Parameter:
428 .  mat - the matrix (should be of type MATIS)
429 .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
430 
431   Output Parameter:
432 .  newmat - the matrix in AIJ format
433 
434   Level: developer
435 
436   Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.
437 
438 .seealso: MATIS
439 @*/
440 PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
441 {
442   PetscErrorCode ierr;
443 
444   PetscFunctionBegin;
445   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
446   PetscValidLogicalCollectiveEnum(mat,reuse,2);
447   PetscValidPointer(newmat,3);
448   if (reuse != MAT_INITIAL_MATRIX) {
449     PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
450     PetscCheckSameComm(mat,1,*newmat,3);
451     if (mat == *newmat) {
452       SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
453     }
454   }
455   ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
456   PetscFunctionReturn(0);
457 }
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "MatDuplicate_IS"
461 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
462 {
463   PetscErrorCode ierr;
464   Mat_IS         *matis = (Mat_IS*)(mat->data);
465   PetscInt       bs,m,n,M,N;
466   Mat            B,localmat;
467 
468   PetscFunctionBegin;
469   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
470   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
471   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
472   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
473   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
474   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
475   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
476   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
477   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   *newmat = B;
479   PetscFunctionReturn(0);
480 }
481 
482 #undef __FUNCT__
483 #define __FUNCT__ "MatIsHermitian_IS"
484 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
485 {
486   PetscErrorCode ierr;
487   Mat_IS         *matis = (Mat_IS*)A->data;
488   PetscBool      local_sym;
489 
490   PetscFunctionBegin;
491   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
492   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatIsSymmetric_IS"
498 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *matis = (Mat_IS*)A->data;
502   PetscBool      local_sym;
503 
504   PetscFunctionBegin;
505   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
506   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "MatDestroy_IS"
512 PetscErrorCode MatDestroy_IS(Mat A)
513 {
514   PetscErrorCode ierr;
515   Mat_IS         *b = (Mat_IS*)A->data;
516 
517   PetscFunctionBegin;
518   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
519   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
520   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
521   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
522   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
523   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
524   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
525   ierr = PetscFree(A->data);CHKERRQ(ierr);
526   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
527   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
528   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
529   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
530   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
531   PetscFunctionReturn(0);
532 }
533 
534 #undef __FUNCT__
535 #define __FUNCT__ "MatMult_IS"
536 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
537 {
538   PetscErrorCode ierr;
539   Mat_IS         *is  = (Mat_IS*)A->data;
540   PetscScalar    zero = 0.0;
541 
542   PetscFunctionBegin;
543   /*  scatter the global vector x into the local work vector */
544   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
545   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
546 
547   /* multiply the local matrix */
548   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
549 
550   /* scatter product back into global memory */
551   ierr = VecSet(y,zero);CHKERRQ(ierr);
552   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
553   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
554   PetscFunctionReturn(0);
555 }
556 
557 #undef __FUNCT__
558 #define __FUNCT__ "MatMultAdd_IS"
559 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
560 {
561   Vec            temp_vec;
562   PetscErrorCode ierr;
563 
564   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
565   if (v3 != v2) {
566     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
567     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
568   } else {
569     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
570     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
571     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
572     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
573     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
574   }
575   PetscFunctionReturn(0);
576 }
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "MatMultTranspose_IS"
580 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
581 {
582   Mat_IS         *is = (Mat_IS*)A->data;
583   PetscErrorCode ierr;
584 
585   PetscFunctionBegin;
586   /*  scatter the global vector x into the local work vector */
587   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
588   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
589 
590   /* multiply the local matrix */
591   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
592 
593   /* scatter product back into global vector */
594   ierr = VecSet(x,0);CHKERRQ(ierr);
595   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
596   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatMultTransposeAdd_IS"
602 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
603 {
604   Vec            temp_vec;
605   PetscErrorCode ierr;
606 
607   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
608   if (v3 != v2) {
609     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
610     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
611   } else {
612     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
613     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
614     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
615     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
616     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
617   }
618   PetscFunctionReturn(0);
619 }
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "MatView_IS"
623 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
624 {
625   Mat_IS         *a = (Mat_IS*)A->data;
626   PetscErrorCode ierr;
627   PetscViewer    sviewer;
628 
629   PetscFunctionBegin;
630   ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
631   ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
632   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
633   ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
634   PetscFunctionReturn(0);
635 }
636 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   Installs the row/column local-to-global mappings on a MATIS matrix.
   Any previously built local matrix, work vectors, scatters and SF are
   destroyed and rebuilt: a fresh sequential local matrix sized by the
   mappings is created, plus the global-to-local scatters used by
   MatMult/MatMultTranspose.
*/
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: square-per-process, sized by the mapping sizes */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is customizable from the options database under the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local stride indices 0..n-1 mapped
     to their global counterparts through the corresponding mapping */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    /* distinct column mapping: build a separate column scatter */
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* same mapping: the column scatter shares the row scatter (extra reference) */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
702 
703 #undef __FUNCT__
704 #define __FUNCT__ "MatSetValues_IS"
705 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
706 {
707   Mat_IS         *is = (Mat_IS*)mat->data;
708   PetscInt       rows_l[2048],cols_l[2048];
709   PetscErrorCode ierr;
710 
711   PetscFunctionBegin;
712 #if defined(PETSC_USE_DEBUG)
713   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
714 #endif
715   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
716   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
717   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
718   PetscFunctionReturn(0);
719 }
720 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_IS"
/* Inserts values using local indices: for MATIS these are exactly the
   indices of the local matrix, so this forwards to MatSetValues on it. */
PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
732 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_IS"
/* Blocked variant of MatSetValuesLocal_IS: local block indices map directly
   to the local matrix, so this forwards to MatSetValuesBlocked on it. */
PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
744 
745 #undef __FUNCT__
746 #define __FUNCT__ "MatZeroRows_IS"
747 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
748 {
749   PetscInt       n_l = 0, *rows_l = NULL;
750   PetscErrorCode ierr;
751 
752   PetscFunctionBegin;
753   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
754   if (n) {
755     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
756     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
757   }
758   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
759   ierr = PetscFree(rows_l);CHKERRQ(ierr);
760   PetscFunctionReturn(0);
761 }
762 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsLocal_IS"
/*
   Zero rows of the local (subdomain) matrix, given in local numbering.
   Simultaneous fixup of a solution/rhs pair (x,b) is not supported.
*/
PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;
  PetscInt       i;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
  {
    /*
       Set up is->x as a "counting vector". This is in order to MatMult_IS
       work properly in the interface nodes.
    */
    /* After this scatter dance, is->y holds, for each local dof, the number of
       subdomains (processes) that share it: the local ones are set to 1, summed
       into the global vector (ADD, reverse), then broadcast back (INSERT, forward). */
    Vec counter;
    ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
    ierr = VecSet(counter,0.);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecDestroy(&counter);CHKERRQ(ierr);
  }
  if (!n) {
    /* no rows zeroed on this process: local problem remains pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    is->pure_neumann = PETSC_FALSE;

    ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
    ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      /* Divide by the dof multiplicity so that the contributions of all sharing
         subdomains sum to diag in the implicitly assembled global matrix */
      ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
805 
806 #undef __FUNCT__
807 #define __FUNCT__ "MatAssemblyBegin_IS"
808 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
809 {
810   Mat_IS         *is = (Mat_IS*)A->data;
811   PetscErrorCode ierr;
812 
813   PetscFunctionBegin;
814   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
815   PetscFunctionReturn(0);
816 }
817 
818 #undef __FUNCT__
819 #define __FUNCT__ "MatAssemblyEnd_IS"
820 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
821 {
822   Mat_IS         *is = (Mat_IS*)A->data;
823   PetscErrorCode ierr;
824 
825   PetscFunctionBegin;
826   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "MatISGetLocalMat_IS"
832 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
833 {
834   Mat_IS *is = (Mat_IS*)mat->data;
835 
836   PetscFunctionBegin;
837   *local = is->A;
838   PetscFunctionReturn(0);
839 }
840 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

    The returned matrix is not reference-counted on behalf of the caller;
  do not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
871 
872 #undef __FUNCT__
873 #define __FUNCT__ "MatISSetLocalMat_IS"
874 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
875 {
876   Mat_IS         *is = (Mat_IS*)mat->data;
877   PetscInt       nrows,ncols,orows,ocols;
878   PetscErrorCode ierr;
879 
880   PetscFunctionBegin;
881   if (is->A) {
882     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
883     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
884     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
885   }
886   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
887   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
888   is->A = local;
889   PetscFunctionReturn(0);
890 }
891 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the type-specific implementation composed on the object */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
921 
922 #undef __FUNCT__
923 #define __FUNCT__ "MatZeroEntries_IS"
924 PetscErrorCode MatZeroEntries_IS(Mat A)
925 {
926   Mat_IS         *a = (Mat_IS*)A->data;
927   PetscErrorCode ierr;
928 
929   PetscFunctionBegin;
930   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
931   PetscFunctionReturn(0);
932 }
933 
934 #undef __FUNCT__
935 #define __FUNCT__ "MatScale_IS"
936 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
937 {
938   Mat_IS         *is = (Mat_IS*)A->data;
939   PetscErrorCode ierr;
940 
941   PetscFunctionBegin;
942   ierr = MatScale(is->A,a);CHKERRQ(ierr);
943   PetscFunctionReturn(0);
944 }
945 
946 #undef __FUNCT__
947 #define __FUNCT__ "MatGetDiagonal_IS"
948 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
949 {
950   Mat_IS         *is = (Mat_IS*)A->data;
951   PetscErrorCode ierr;
952 
953   PetscFunctionBegin;
954   /* get diagonal of the local matrix */
955   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
956 
957   /* scatter diagonal back into global vector */
958   ierr = VecSet(v,0);CHKERRQ(ierr);
959   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
960   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
964 #undef __FUNCT__
965 #define __FUNCT__ "MatSetOption_IS"
966 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
967 {
968   Mat_IS         *a = (Mat_IS*)A->data;
969   PetscErrorCode ierr;
970 
971   PetscFunctionBegin;
972   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
973   PetscFunctionReturn(0);
974 }
975 
976 #undef __FUNCT__
977 #define __FUNCT__ "MatCreateIS"
978 /*@
979     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
980        process but not across processes.
981 
982    Input Parameters:
983 +     comm    - MPI communicator that will share the matrix
984 .     bs      - block size of the matrix
985 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
986 .     rmap    - local to global map for rows
987 -     cmap    - local to global map for cols
988 
989    Output Parameter:
990 .    A - the resulting matrix
991 
992    Level: advanced
993 
994    Notes: See MATIS for more details
995           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
996           by that process. The sizes of rmap and cmap define the size of the local matrices.
997           If either rmap or cmap are NULL, than the matrix is assumed to be square
998 
999 .seealso: MATIS, MatSetLocalToGlobalMapping()
1000 @*/
1001 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1002 {
1003   PetscErrorCode ierr;
1004 
1005   PetscFunctionBegin;
1006   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1007   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1008   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1009   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1010   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1011   ierr = MatSetUp(*A);CHKERRQ(ierr);
1012   if (rmap && cmap) {
1013     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1014   } else if (!rmap) {
1015     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1016   } else {
1017     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1018   }
1019   PetscFunctionReturn(0);
1020 }
1021 
1022 /*MC
1023    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1024    This stores the matrices in globally unassembled form. Each processor
1025    assembles only its local Neumann problem and the parallel matrix vector
1026    product is handled "implicitly".
1027 
1028    Operations Provided:
1029 +  MatMult()
1030 .  MatMultAdd()
1031 .  MatMultTranspose()
1032 .  MatMultTransposeAdd()
1033 .  MatZeroEntries()
1034 .  MatSetOption()
1035 .  MatZeroRows()
1036 .  MatZeroRowsLocal()
1037 .  MatSetValues()
1038 .  MatSetValuesLocal()
1039 .  MatScale()
1040 .  MatGetDiagonal()
1041 -  MatSetLocalToGlobalMapping()
1042 
1043    Options Database Keys:
1044 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1045 
   Notes: The options prefix for the inner matrix is -is_mat_xxx
1047 
1048           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1049 
1050           You can do matrix preallocation on the local matrix after you obtain it with
1051           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1052 
1053   Level: advanced
1054 
1055 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1056 
1057 M*/
1058 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/*
   Constructor for the MATIS matrix type: allocates the type-specific data
   structure, installs the operation table, and composes the MATIS-specific
   query/set functions on the object.
*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  /* clear the whole ops table so unsupported operations stay NULL */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;

  /* special MATIS functions */
  /* composed functions are reached through PetscUseMethod (see MatISGetLocalMat etc.) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1102