xref: /petsc/src/mat/impls/is/matis.c (revision dced61a5cfeeeda68dfa4ee3e53b34d3cfb8da9f)
1 
2 /*
3     Creates a matrix class for using the Neumann-Neumann type preconditioners.
4    This stores the matrices in globally unassembled form. Each processor
5    assembles only its local Neumann problem and the parallel matrix vector
6    product is handled "implicitly".
7 
8      We provide:
9          MatMult()
10 
11     Currently this allows for only one subdomain per processor.
12 
13 */
14 
15 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
16 
17 static PetscErrorCode MatISComputeSF_Private(Mat);
18 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Builds the PetscSF used by the preallocation routines to communicate between
   the assembled global row space (roots: locally owned rows of B) and the
   unassembled subdomain space (leaves: rows of the local matrix matis->A),
   and allocates the matching rootdata/leafdata work arrays. */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: one per row of the local (subdomain) matrix */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  /* roots: one per locally owned row of the global matrix */
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global row map gives, for each leaf, the global row it maps to */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER refers to ilocal which is NULL */
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  /* work arrays reused by every broadcast/reduce over this SF */
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
38 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat()
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS types) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
86 
87 #undef __FUNCT__
88 #define __FUNCT__ "MatISSetPreallocation_IS"
89 PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
90 {
91   Mat_IS         *matis = (Mat_IS*)(B->data);
92   PetscInt       bs,i,nlocalcols;
93   PetscErrorCode ierr;
94 
95   PetscFunctionBegin;
96   if (!matis->A) {
97     SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
98   }
99   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
100     ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
101   }
102   if (!d_nnz) {
103     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
104   } else {
105     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
106   }
107   if (!o_nnz) {
108     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
109   } else {
110     for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
111   }
112   ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
113   ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
114   ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
115   ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
116   for (i=0;i<matis->sf_nleaves;i++) {
117     matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
118   }
119   ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
120   for (i=0;i<matis->sf_nleaves/bs;i++) {
121     matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
122   }
123   ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
124   for (i=0;i<matis->sf_nleaves/bs;i++) {
125     matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
126   }
127   ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
128   PetscFunctionReturn(0);
129 }
130 
131 #undef __FUNCT__
132 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
133 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
134 {
135   Mat_IS          *matis = (Mat_IS*)(A->data);
136   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
137   const PetscInt  *global_indices_r,*global_indices_c;
138   PetscInt        i,j,bs,rows,cols;
139   PetscInt        lrows,lcols;
140   PetscInt        local_rows,local_cols;
141   PetscMPIInt     nsubdomains;
142   PetscBool       isdense,issbaij;
143   PetscErrorCode  ierr;
144 
145   PetscFunctionBegin;
146   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
147   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
148   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
149   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
150   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
151   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
152   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
153   if (A->rmap->mapping != A->cmap->mapping) {
154     ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
155   } else {
156     global_indices_c = global_indices_r;
157   }
158 
159   if (issbaij) {
160     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
161   }
162   /*
163      An SF reduce is needed to sum up properly on shared rows.
164      Note that generally preallocation is not exact, since it overestimates nonzeros
165   */
166   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
167     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
168   }
169   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
170   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
171   /* All processes need to compute entire row ownership */
172   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
173   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
174   for (i=0;i<nsubdomains;i++) {
175     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
176       row_ownership[j] = i;
177     }
178   }
179 
180   /*
181      my_dnz and my_onz contains exact contribution to preallocation from each local mat
182      then, they will be summed up properly. This way, preallocation is always sufficient
183   */
184   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
185   /* preallocation as a MATAIJ */
186   if (isdense) { /* special case for dense local matrices */
187     for (i=0;i<local_rows;i++) {
188       PetscInt index_row = global_indices_r[i];
189       for (j=i;j<local_rows;j++) {
190         PetscInt owner = row_ownership[index_row];
191         PetscInt index_col = global_indices_c[j];
192         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
193           my_dnz[i] += 1;
194         } else { /* offdiag block */
195           my_onz[i] += 1;
196         }
197         /* same as before, interchanging rows and cols */
198         if (i != j) {
199           owner = row_ownership[index_col];
200           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
201             my_dnz[j] += 1;
202           } else {
203             my_onz[j] += 1;
204           }
205         }
206       }
207     }
208   } else { /* TODO: this could be optimized using MatGetRowIJ */
209     for (i=0;i<local_rows;i++) {
210       const PetscInt *cols;
211       PetscInt       ncols,index_row = global_indices_r[i];
212       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
213       for (j=0;j<ncols;j++) {
214         PetscInt owner = row_ownership[index_row];
215         PetscInt index_col = global_indices_c[cols[j]];
216         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
217           my_dnz[i] += 1;
218         } else { /* offdiag block */
219           my_onz[i] += 1;
220         }
221         /* same as before, interchanging rows and cols */
222         if (issbaij && index_col != index_row) {
223           owner = row_ownership[index_col];
224           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
225             my_dnz[cols[j]] += 1;
226           } else {
227             my_onz[cols[j]] += 1;
228           }
229         }
230       }
231       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
232     }
233   }
234   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
235   if (global_indices_c != global_indices_r) {
236     ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_c);CHKERRQ(ierr);
237   }
238   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
239 
240   /* Reduce my_dnz and my_onz */
241   if (maxreduce) {
242     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
243     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
244     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
245     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
246   } else {
247     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
248     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
249     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
250     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
251   }
252   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
253 
254   /* Resize preallocation if overestimated */
255   for (i=0;i<lrows;i++) {
256     dnz[i] = PetscMin(dnz[i],lcols);
257     onz[i] = PetscMin(onz[i],cols-lcols);
258   }
259   /* set preallocation */
260   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
261   for (i=0;i<lrows/bs;i++) {
262     dnz[i] = dnz[i*bs]/bs;
263     onz[i] = onz[i*bs]/bs;
264   }
265   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
266   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
267   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
268   if (issbaij) {
269     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
270   }
271   PetscFunctionReturn(0);
272 }
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
276 PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
277 {
278   Mat_IS         *matis = (Mat_IS*)(mat->data);
279   Mat            local_mat;
280   /* info on mat */
281   PetscInt       bs,rows,cols,lrows,lcols;
282   PetscInt       local_rows,local_cols;
283   PetscBool      isdense,issbaij,isseqaij;
284   PetscMPIInt    nsubdomains;
285   /* values insertion */
286   PetscScalar    *array;
287   /* work */
288   PetscErrorCode ierr;
289 
290   PetscFunctionBegin;
291   /* get info from mat */
292   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
293   if (nsubdomains == 1) {
294     if (reuse == MAT_INITIAL_MATRIX) {
295       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
296     } else {
297       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
298     }
299     PetscFunctionReturn(0);
300   }
301   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
302   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
303   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
304   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
305   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
306   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
307   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
308 
309   if (reuse == MAT_INITIAL_MATRIX) {
310     MatType     new_mat_type;
311     PetscBool   issbaij_red;
312 
313     /* determining new matrix type */
314     ierr = MPI_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
315     if (issbaij_red) {
316       new_mat_type = MATSBAIJ;
317     } else {
318       if (bs>1) {
319         new_mat_type = MATBAIJ;
320       } else {
321         new_mat_type = MATAIJ;
322       }
323     }
324 
325     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
326     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
327     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
328     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
329     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
330   } else {
331     PetscInt mbs,mrows,mcols,mlrows,mlcols;
332     /* some checks */
333     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
334     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
335     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
336     if (mrows != rows) {
337       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
338     }
339     if (mcols != cols) {
340       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
341     }
342     if (mlrows != lrows) {
343       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
344     }
345     if (mlcols != lcols) {
346       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
347     }
348     if (mbs != bs) {
349       SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
350     }
351     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
352   }
353 
354   if (issbaij) {
355     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
356   } else {
357     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
358     local_mat = matis->A;
359   }
360 
361   /* Set values */
362   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
363   if (isdense) { /* special case for dense local matrices */
364     PetscInt i,*dummy_rows,*dummy_cols;
365 
366     if (local_rows != local_cols) {
367       ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
368       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
369       for (i=0;i<local_cols;i++) dummy_cols[i] = i;
370     } else {
371       ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
372       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
373       dummy_cols = dummy_rows;
374     }
375     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
376     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
377     ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
378     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
379     if (dummy_rows != dummy_cols) {
380       ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
381     } else {
382       ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
383     }
384   } else if (isseqaij) {
385     PetscInt  i,nvtxs,*xadj,*adjncy;
386     PetscBool done;
387 
388     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
389     if (!done) {
390       SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
391     }
392     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
393     for (i=0;i<nvtxs;i++) {
394       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
395     }
396     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
397     if (!done) {
398       SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called in %s\n",__FUNCT__);
399     }
400     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
401   } else { /* very basic values insertion for all other matrix types */
402     PetscInt i;
403 
404     for (i=0;i<local_rows;i++) {
405       PetscInt       j;
406       const PetscInt *local_indices_cols;
407 
408       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
409       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
410       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
411     }
412   }
413   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
414   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
415   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
416   if (isdense) {
417     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
418   }
419   PetscFunctionReturn(0);
420 }
421 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Collective on Mat

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* reusing requires a valid matrix on the same communicator, distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
    }
  }
  /* dispatch to the type-specific implementation (errors for non-MATIS types) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
458 
459 #undef __FUNCT__
460 #define __FUNCT__ "MatDuplicate_IS"
461 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
462 {
463   PetscErrorCode ierr;
464   Mat_IS         *matis = (Mat_IS*)(mat->data);
465   PetscInt       bs,m,n,M,N;
466   Mat            B,localmat;
467 
468   PetscFunctionBegin;
469   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
470   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
471   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
472   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
473   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
474   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
475   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
476   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
477   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   *newmat = B;
479   PetscFunctionReturn(0);
480 }
481 
482 #undef __FUNCT__
483 #define __FUNCT__ "MatIsHermitian_IS"
484 PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
485 {
486   PetscErrorCode ierr;
487   Mat_IS         *matis = (Mat_IS*)A->data;
488   PetscBool      local_sym;
489 
490   PetscFunctionBegin;
491   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
492   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
493   PetscFunctionReturn(0);
494 }
495 
496 #undef __FUNCT__
497 #define __FUNCT__ "MatIsSymmetric_IS"
498 PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
499 {
500   PetscErrorCode ierr;
501   Mat_IS         *matis = (Mat_IS*)A->data;
502   PetscBool      local_sym;
503 
504   PetscFunctionBegin;
505   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
506   ierr = MPI_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
507   PetscFunctionReturn(0);
508 }
509 
510 #undef __FUNCT__
511 #define __FUNCT__ "MatDestroy_IS"
512 PetscErrorCode MatDestroy_IS(Mat A)
513 {
514   PetscErrorCode ierr;
515   Mat_IS         *b = (Mat_IS*)A->data;
516 
517   PetscFunctionBegin;
518   ierr = MatDestroy(&b->A);CHKERRQ(ierr);
519   ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
520   ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
521   ierr = VecDestroy(&b->x);CHKERRQ(ierr);
522   ierr = VecDestroy(&b->y);CHKERRQ(ierr);
523   ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
524   ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
525   ierr = PetscFree(A->data);CHKERRQ(ierr);
526   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
527   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
528   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
529   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
530   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
531   PetscFunctionReturn(0);
532 }
533 
534 #undef __FUNCT__
535 #define __FUNCT__ "MatMult_IS"
536 PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
537 {
538   PetscErrorCode ierr;
539   Mat_IS         *is  = (Mat_IS*)A->data;
540   PetscScalar    zero = 0.0;
541 
542   PetscFunctionBegin;
543   /*  scatter the global vector x into the local work vector */
544   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
545   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
546 
547   /* multiply the local matrix */
548   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
549 
550   /* scatter product back into global memory */
551   ierr = VecSet(y,zero);CHKERRQ(ierr);
552   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
553   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
554   PetscFunctionReturn(0);
555 }
556 
557 #undef __FUNCT__
558 #define __FUNCT__ "MatMultAdd_IS"
559 PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
560 {
561   Vec            temp_vec;
562   PetscErrorCode ierr;
563 
564   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
565   if (v3 != v2) {
566     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
567     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
568   } else {
569     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
570     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
571     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
572     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
573     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
574   }
575   PetscFunctionReturn(0);
576 }
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "MatMultTranspose_IS"
580 PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
581 {
582   Mat_IS         *is = (Mat_IS*)A->data;
583   PetscErrorCode ierr;
584 
585   PetscFunctionBegin;
586   /*  scatter the global vector x into the local work vector */
587   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
588   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
589 
590   /* multiply the local matrix */
591   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
592 
593   /* scatter product back into global vector */
594   ierr = VecSet(x,0);CHKERRQ(ierr);
595   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
596   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
597   PetscFunctionReturn(0);
598 }
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "MatMultTransposeAdd_IS"
602 PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
603 {
604   Vec            temp_vec;
605   PetscErrorCode ierr;
606 
607   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
608   if (v3 != v2) {
609     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
610     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
611   } else {
612     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
613     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
614     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
615     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
616     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
617   }
618   PetscFunctionReturn(0);
619 }
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "MatView_IS"
623 PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
624 {
625   Mat_IS         *a = (Mat_IS*)A->data;
626   PetscErrorCode ierr;
627   PetscViewer    sviewer;
628 
629   PetscFunctionBegin;
630   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
631   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
632   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
633   PetscFunctionReturn(0);
634 }
635 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Installs the row/column local-to-global mappings on a MATIS matrix: destroys
   any previous local data, creates a fresh sequential local matrix sized by the
   mappings, and builds the global-to-local scatters used by MatMult and
   friends. */
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: sequential, sized by the mapping sizes,
     type/options configurable through the "is_" prefix */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatters: local index i <-> global index
     mapping(i), for rows (rctx) and columns (cctx) */
  ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
  if (rmapping != cmapping) {
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
  } else {
    /* identical mappings: share the row scatter for columns too */
    ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
    is->cctx = is->rctx;
  }
  ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
  ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
701 
702 #undef __FUNCT__
703 #define __FUNCT__ "MatSetValues_IS"
704 PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
705 {
706   Mat_IS         *is = (Mat_IS*)mat->data;
707   PetscInt       rows_l[2048],cols_l[2048];
708   PetscErrorCode ierr;
709 
710   PetscFunctionBegin;
711 #if defined(PETSC_USE_DEBUG)
712   if (m > 2048 || n > 2048) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= 2048: they are %D %D",m,n);
713 #endif
714   ierr = ISG2LMapApply(mat->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
715   ierr = ISG2LMapApply(mat->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
716   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
717   PetscFunctionReturn(0);
718 }
719 
720 #undef __FUNCT__
721 #define __FUNCT__ "MatSetValuesLocal_IS"
722 PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
723 {
724   PetscErrorCode ierr;
725   Mat_IS         *is = (Mat_IS*)A->data;
726 
727   PetscFunctionBegin;
728   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
729   PetscFunctionReturn(0);
730 }
731 
732 #undef __FUNCT__
733 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
734 PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
735 {
736   PetscErrorCode ierr;
737   Mat_IS         *is = (Mat_IS*)A->data;
738 
739   PetscFunctionBegin;
740   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
741   PetscFunctionReturn(0);
742 }
743 
744 #undef __FUNCT__
745 #define __FUNCT__ "MatZeroRows_IS"
746 PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
747 {
748   PetscInt       n_l = 0, *rows_l = NULL;
749   PetscErrorCode ierr;
750 
751   PetscFunctionBegin;
752   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
753   if (n) {
754     ierr = PetscMalloc1(n,&rows_l);CHKERRQ(ierr);
755     ierr = ISGlobalToLocalMappingApply(A->rmap->mapping,IS_GTOLM_DROP,n,rows,&n_l,rows_l);CHKERRQ(ierr);
756   }
757   ierr = MatZeroRowsLocal(A,n_l,rows_l,diag,x,b);CHKERRQ(ierr);
758   ierr = PetscFree(rows_l);CHKERRQ(ierr);
759   PetscFunctionReturn(0);
760 }
761 
762 #undef __FUNCT__
763 #define __FUNCT__ "MatZeroRowsLocal_IS"
764 PetscErrorCode MatZeroRowsLocal_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
765 {
766   Mat_IS         *is = (Mat_IS*)A->data;
767   PetscErrorCode ierr;
768   PetscInt       i;
769   PetscScalar    *array;
770 
771   PetscFunctionBegin;
772   if (x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support");
773   {
774     /*
775        Set up is->x as a "counting vector". This is in order to MatMult_IS
776        work properly in the interface nodes.
777     */
778     Vec counter;
779     ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
780     ierr = VecSet(counter,0.);CHKERRQ(ierr);
781     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
782     ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
783     ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
784     ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
785     ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
786     ierr = VecDestroy(&counter);CHKERRQ(ierr);
787   }
788   if (!n) {
789     is->pure_neumann = PETSC_TRUE;
790   } else {
791     is->pure_neumann = PETSC_FALSE;
792 
793     ierr = VecGetArray(is->y,&array);CHKERRQ(ierr);
794     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
795     for (i=0; i<n; i++) {
796       ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
797     }
798     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
799     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
800     ierr = VecRestoreArray(is->y,&array);CHKERRQ(ierr);
801   }
802   PetscFunctionReturn(0);
803 }
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "MatAssemblyBegin_IS"
807 PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
808 {
809   Mat_IS         *is = (Mat_IS*)A->data;
810   PetscErrorCode ierr;
811 
812   PetscFunctionBegin;
813   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 #undef __FUNCT__
818 #define __FUNCT__ "MatAssemblyEnd_IS"
819 PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
820 {
821   Mat_IS         *is = (Mat_IS*)A->data;
822   PetscErrorCode ierr;
823 
824   PetscFunctionBegin;
825   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
826   PetscFunctionReturn(0);
827 }
828 
829 #undef __FUNCT__
830 #define __FUNCT__ "MatISGetLocalMat_IS"
831 PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
832 {
833   Mat_IS *is = (Mat_IS*)mat->data;
834 
835   PetscFunctionBegin;
836   *local = is->A;
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "MatISGetLocalMat"
842 /*@
843     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
844 
845   Input Parameter:
846 .  mat - the matrix
847 
848   Output Parameter:
849 .  local - the local matrix
850 
851   Level: advanced
852 
853   Notes:
854     This can be called if you have precomputed the nonzero structure of the
855   matrix and want to provide it to the inner matrix object to improve the performance
856   of the MatSetValues() operation.
857 
858 .seealso: MATIS
859 @*/
860 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
861 {
862   PetscErrorCode ierr;
863 
864   PetscFunctionBegin;
865   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
866   PetscValidPointer(local,2);
867   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
868   PetscFunctionReturn(0);
869 }
870 
871 #undef __FUNCT__
872 #define __FUNCT__ "MatISSetLocalMat_IS"
873 PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
874 {
875   Mat_IS         *is = (Mat_IS*)mat->data;
876   PetscInt       nrows,ncols,orows,ocols;
877   PetscErrorCode ierr;
878 
879   PetscFunctionBegin;
880   if (is->A) {
881     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
882     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
883     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
884   }
885   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
886   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
887   is->A = local;
888   PetscFunctionReturn(0);
889 }
890 
891 #undef __FUNCT__
892 #define __FUNCT__ "MatISSetLocalMat"
893 /*@
894     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
895 
896   Input Parameter:
897 .  mat - the matrix
898 .  local - the local matrix
899 
900   Output Parameter:
901 
902   Level: advanced
903 
904   Notes:
905     This can be called if you have precomputed the local matrix and
906   want to provide it to the matrix object MATIS.
907 
908 .seealso: MATIS
909 @*/
910 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
911 {
912   PetscErrorCode ierr;
913 
914   PetscFunctionBegin;
915   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
916   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
917   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
918   PetscFunctionReturn(0);
919 }
920 
921 #undef __FUNCT__
922 #define __FUNCT__ "MatZeroEntries_IS"
923 PetscErrorCode MatZeroEntries_IS(Mat A)
924 {
925   Mat_IS         *a = (Mat_IS*)A->data;
926   PetscErrorCode ierr;
927 
928   PetscFunctionBegin;
929   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
930   PetscFunctionReturn(0);
931 }
932 
933 #undef __FUNCT__
934 #define __FUNCT__ "MatScale_IS"
935 PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
936 {
937   Mat_IS         *is = (Mat_IS*)A->data;
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   ierr = MatScale(is->A,a);CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "MatGetDiagonal_IS"
947 PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
948 {
949   Mat_IS         *is = (Mat_IS*)A->data;
950   PetscErrorCode ierr;
951 
952   PetscFunctionBegin;
953   /* get diagonal of the local matrix */
954   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
955 
956   /* scatter diagonal back into global vector */
957   ierr = VecSet(v,0);CHKERRQ(ierr);
958   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
959   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
960   PetscFunctionReturn(0);
961 }
962 
963 #undef __FUNCT__
964 #define __FUNCT__ "MatSetOption_IS"
965 PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
966 {
967   Mat_IS         *a = (Mat_IS*)A->data;
968   PetscErrorCode ierr;
969 
970   PetscFunctionBegin;
971   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
972   PetscFunctionReturn(0);
973 }
974 
975 #undef __FUNCT__
976 #define __FUNCT__ "MatCreateIS"
977 /*@
978     MatCreateIS - Creates a "process" unassmembled matrix, it is assembled on each
979        process but not across processes.
980 
981    Input Parameters:
982 +     comm    - MPI communicator that will share the matrix
983 .     bs      - block size of the matrix
984 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
985 .     rmap    - local to global map for rows
986 -     cmap    - local to global map for cols
987 
988    Output Parameter:
989 .    A - the resulting matrix
990 
991    Level: advanced
992 
993    Notes: See MATIS for more details
994           m and n are NOT related to the size of the map, they are the size of the part of the vector owned
995           by that process. The sizes of rmap and cmap define the size of the local matrices.
996           If either rmap or cmap are NULL, than the matrix is assumed to be square
997 
998 .seealso: MATIS, MatSetLocalToGlobalMapping()
999 @*/
1000 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1001 {
1002   PetscErrorCode ierr;
1003 
1004   PetscFunctionBegin;
1005   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1006   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1007   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1008   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1009   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1010   ierr = MatSetUp(*A);CHKERRQ(ierr);
1011   if (rmap && cmap) {
1012     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1013   } else if (!rmap) {
1014     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1015   } else {
1016     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1017   }
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 /*MC
1022    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition type preconditioners (e.g. PCBDDC).
1023    This stores the matrices in globally unassembled form. Each processor
1024    assembles only its local Neumann problem and the parallel matrix vector
1025    product is handled "implicitly".
1026 
1027    Operations Provided:
1028 +  MatMult()
1029 .  MatMultAdd()
1030 .  MatMultTranspose()
1031 .  MatMultTransposeAdd()
1032 .  MatZeroEntries()
1033 .  MatSetOption()
1034 .  MatZeroRows()
1035 .  MatZeroRowsLocal()
1036 .  MatSetValues()
1037 .  MatSetValuesLocal()
1038 .  MatScale()
1039 .  MatGetDiagonal()
1040 -  MatSetLocalToGlobalMapping()
1041 
1042    Options Database Keys:
1043 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1044 
1045    Notes: The options prefix for the inner matrix is -is_mat_xxx
1046 
1047           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1048 
1049           You can do matrix preallocation on the local matrix after you obtain it with
1050           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1051 
1052   Level: advanced
1053 
1054 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), PCBDDC
1055 
1056 M*/
1057 
1058 #undef __FUNCT__
1059 #define __FUNCT__ "MatCreate_IS"
1060 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1061 {
1062   PetscErrorCode ierr;
1063   Mat_IS         *b;
1064 
1065   PetscFunctionBegin;
1066   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1067   A->data = (void*)b;
1068 
1069   /* matrix ops */
1070   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1071   A->ops->mult                    = MatMult_IS;
1072   A->ops->multadd                 = MatMultAdd_IS;
1073   A->ops->multtranspose           = MatMultTranspose_IS;
1074   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1075   A->ops->destroy                 = MatDestroy_IS;
1076   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1077   A->ops->setvalues               = MatSetValues_IS;
1078   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1079   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1080   A->ops->zerorows                = MatZeroRows_IS;
1081   A->ops->zerorowslocal           = MatZeroRowsLocal_IS;
1082   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1083   A->ops->assemblyend             = MatAssemblyEnd_IS;
1084   A->ops->view                    = MatView_IS;
1085   A->ops->zeroentries             = MatZeroEntries_IS;
1086   A->ops->scale                   = MatScale_IS;
1087   A->ops->getdiagonal             = MatGetDiagonal_IS;
1088   A->ops->setoption               = MatSetOption_IS;
1089   A->ops->ishermitian             = MatIsHermitian_IS;
1090   A->ops->issymmetric             = MatIsSymmetric_IS;
1091   A->ops->duplicate               = MatDuplicate_IS;
1092 
1093   /* special MATIS functions */
1094   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1095   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1096   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1097   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1098   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1099   PetscFunctionReturn(0);
1100 }
1101