xref: /petsc/src/mat/impls/is/matis.c (revision 7fa8f2d3c229ab24d2648dd75b628cc252fb1e95)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "MatGetInfo_IS"
18 static PetscErrorCode MatGetInfo_IS(Mat A,MatInfoType flag,MatInfo *ginfo)
19 {
20   Mat_IS         *matis = (Mat_IS*)A->data;
21   MatInfo        info;
22   PetscReal      isend[5],irecv[5];
23   PetscInt       bs;
24   PetscErrorCode ierr;
25 
26   PetscFunctionBegin;
27   ierr     = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
28   ierr     = MatGetInfo(matis->A,MAT_LOCAL,&info);CHKERRQ(ierr);
29   isend[0] = info.nz_used;
30   isend[1] = info.nz_allocated;
31   isend[2] = info.nz_unneeded;
32   isend[3] = info.memory;
33   isend[4] = info.mallocs;
34   if (flag == MAT_LOCAL) {
35     ginfo->nz_used      = isend[0];
36     ginfo->nz_allocated = isend[1];
37     ginfo->nz_unneeded  = isend[2];
38     ginfo->memory       = isend[3];
39     ginfo->mallocs      = isend[4];
40     ginfo->assemblies   = matis->A->num_ass;
41   } else if (flag == MAT_GLOBAL_MAX) {
42     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_MAX,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
43 
44     ginfo->nz_used      = irecv[0];
45     ginfo->nz_allocated = irecv[1];
46     ginfo->nz_unneeded  = irecv[2];
47     ginfo->memory       = irecv[3];
48     ginfo->mallocs      = irecv[4];
49     ginfo->assemblies   = A->num_ass;
50   } else if (flag == MAT_GLOBAL_SUM) {
51     ierr = MPIU_Allreduce(isend,irecv,5,MPIU_REAL,MPIU_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
52 
53     ginfo->nz_used      = irecv[0];
54     ginfo->nz_allocated = irecv[1];
55     ginfo->nz_unneeded  = irecv[2];
56     ginfo->memory       = irecv[3];
57     ginfo->mallocs      = irecv[4];
58     ginfo->assemblies   = A->num_ass;
59   }
60   ginfo->block_size        = bs;
61   ginfo->fill_ratio_given  = 0;
62   ginfo->fill_ratio_needed = 0;
63   ginfo->factor_mallocs    = 0;
64   PetscFunctionReturn(0);
65 }
66 
67 #undef __FUNCT__
68 #define __FUNCT__ "MatTranspose_IS"
69 PetscErrorCode MatTranspose_IS(Mat A,MatReuse reuse,Mat *B)
70 {
71   Mat                    C,lC,lA;
72   ISLocalToGlobalMapping rl2g,cl2g;
73   PetscBool              notset = PETSC_FALSE,cong = PETSC_TRUE;
74   PetscErrorCode         ierr;
75 
76   PetscFunctionBegin;
77   if (reuse == MAT_REUSE_MATRIX) {
78     PetscBool rcong,ccong;
79 
80     ierr = PetscLayoutCompare((*B)->cmap,A->rmap,&rcong);CHKERRQ(ierr);
81     ierr = PetscLayoutCompare((*B)->rmap,A->cmap,&ccong);CHKERRQ(ierr);
82     cong = (PetscBool)(rcong || ccong);
83   }
84   if (reuse == MAT_INITIAL_MATRIX || *B == A || !cong) {
85     ierr = MatCreate(PetscObjectComm((PetscObject)A),&C);CHKERRQ(ierr);
86   } else {
87     ierr = PetscObjectTypeCompare((PetscObject)*B,MATIS,&notset);CHKERRQ(ierr);
88     C = *B;
89   }
90 
91   if (!notset) {
92     ierr = MatSetSizes(C,A->cmap->n,A->rmap->n,A->cmap->N,A->rmap->N);CHKERRQ(ierr);
93     ierr = MatSetBlockSizes(C,PetscAbs(A->cmap->bs),PetscAbs(A->rmap->bs));CHKERRQ(ierr);
94     ierr = MatSetType(C,MATIS);CHKERRQ(ierr);
95     ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr);
96     ierr = MatSetLocalToGlobalMapping(C,cl2g,rl2g);CHKERRQ(ierr);
97   }
98 
99   /* perform local transposition */
100   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
101   ierr = MatTranspose(lA,MAT_INITIAL_MATRIX,&lC);CHKERRQ(ierr);
102   ierr = MatISSetLocalMat(C,lC);CHKERRQ(ierr);
103   ierr = MatDestroy(&lC);CHKERRQ(ierr);
104 
105   ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
106   ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
107 
108   if (reuse == MAT_INITIAL_MATRIX || *B != A) {
109     if (!cong) {
110       ierr = MatDestroy(B);CHKERRQ(ierr);
111     }
112     *B = C;
113   } else {
114     ierr = MatHeaderMerge(A,&C);CHKERRQ(ierr);
115   }
116   PetscFunctionReturn(0);
117 }
118 
#undef __FUNCT__
#define __FUNCT__ "MatDiagonalSet_IS"
/*
   Sets (or adds to) the diagonal of a MATIS matrix. The global diagonal
   values D are scattered to the local numbering, then divided by the
   multiplicity counter so that the contributions of the subdomains sharing
   a dof sum up to the requested global value.
*/
PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!D) { /* this code branch is used by MatShift_IS */
    /* NOTE(review): the shift amount is hard-coded to 1 here, so a
       NULL D always behaves as a unit shift — confirm callers expect this */
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
  } else {
    /* bring the global diagonal values into the local (unassembled) numbering */
    ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  /* divide by the dof multiplicity so shared entries sum to the global value */
  ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
  ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
137 
138 #undef __FUNCT__
139 #define __FUNCT__ "MatShift_IS"
140 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
141 {
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
146   PetscFunctionReturn(0);
147 }
148 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
/*
   MatSetValuesLocal implementation used for MATIS submatrices: the incoming
   local indices are remapped through the row/column local-to-global mappings
   of A, then inserted into the local (unassembled) matrix.
*/
static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  /* stack buffers for the remapped indices; capped at MATIS_MAX_ENTRIES_INSERTION */
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* guard against overruning the fixed-size stack buffers (debug builds only) */
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
#endif
  ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
166 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
/*
   Blocked variant of MatSetValuesLocal_SubMat_IS: block indices are remapped
   through the block local-to-global mappings before insertion into the
   local (unassembled) matrix.
*/
static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
{
  PetscErrorCode ierr;
  Mat_IS         *is = (Mat_IS*)A->data;
  /* stack buffers for the remapped block indices */
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* guard against overrunning the fixed-size stack buffers (debug builds only) */
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
#endif
  ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
184 
#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   Communicates a list of N global indices (idxs) to their owning processes
   in the given layout. On output:
     on     - number of indices owned locally that were selected by some process
     oidxs  - local positions (in [0,map->n)) of those indices (caller frees)
     ogidxs - optional: for each selected local index, a contiguous global
              numbering of the selected entries across processes (caller frees)
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks "not selected"; any reduced-in leaf value flips it (see MPI_LOR below) */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p];
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* after the reduction, lidxs[r] >= 0 iff local entry r was selected by some process */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* exclusive prefix sum of the per-process selection counts gives each
       process its starting offset in the contiguous numbering */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r]; /* compact the global numbering in lock-step */
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
243 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   Extracts the parallel submatrix mat(irow,icol) as a new MATIS matrix.
   The requested global indices are communicated to their owning processes in
   mat's layouts, then broadcast back over the matrix SF so that each process
   can identify which rows/columns of its local (unassembled) matrix belong
   to the submatrix. irow and icol must not contain duplicate entries.
   On MAT_REUSE_MATRIX the local ISs created by a previous
   MAT_INITIAL_MATRIX call (getsub_ris/getsub_cis) are reused.
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* a reused matrix must be a MATIS carrying the local row/col ISs saved
       by a previous MAT_INITIAL_MATRIX call */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* debug-only sanity check: accumulate 1 at each requested global index in a
     vector and verify no index is counted more than once */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  /* same duplicate check for the column index set */
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark selected owned rows with (new global index + 1); 0 means "not selected" */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    /* broadcast the selection to the local (unassembled) numbering */
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    /* count, then collect, the selected local rows and their new global indices */
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1; /* undo the +1 marker */
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* square symmetric selection: reuse the row mapping and IS for columns */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same mark/broadcast/collect scheme as above, on the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the (possibly reused) local ISs */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
395 
396 #undef __FUNCT__
397 #define __FUNCT__ "MatCopy_IS"
398 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
399 {
400   Mat_IS         *a = (Mat_IS*)A->data,*b;
401   PetscBool      ismatis;
402   PetscErrorCode ierr;
403 
404   PetscFunctionBegin;
405   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
406   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
407   b = (Mat_IS*)B->data;
408   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
409   PetscFunctionReturn(0);
410 }
411 
412 #undef __FUNCT__
413 #define __FUNCT__ "MatMissingDiagonal_IS"
414 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
415 {
416   Vec               v;
417   const PetscScalar *array;
418   PetscInt          i,n;
419   PetscErrorCode    ierr;
420 
421   PetscFunctionBegin;
422   *missing = PETSC_FALSE;
423   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
424   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
425   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
426   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
427   for (i=0;i<n;i++) if (array[i] == 0.) break;
428   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
429   ierr = VecDestroy(&v);CHKERRQ(ierr);
430   if (i != n) *missing = PETSC_TRUE;
431   if (d) {
432     *d = -1;
433     if (*missing) {
434       PetscInt rstart;
435       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
436       *d = i+rstart;
437     }
438   }
439   PetscFunctionReturn(0);
440 }
441 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forests used to exchange data between the global
   (assembled) numbering and the local (unassembled) numbering of a MATIS
   matrix. Roots are the locally owned global rows of B; leaves are the rows
   of the local matrix, connected through the row l2g mapping. Persistent
   rootdata/leafdata work arrays are allocated alongside. A separate column
   SF is built only when the column mapping differs from the row mapping;
   otherwise the row SF and its work arrays are shared (note: destruction
   code elsewhere must account for this aliasing).
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* row and column mappings coincide: alias the row SF and its work arrays */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
475 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS types) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MATIS implementation of MatISSetPreallocation: the MPIAIJ-style row
   counts (diagonal + off-diagonal) given in the global numbering are summed
   at the roots, broadcast over the matrix SF to the local (unassembled)
   numbering, and used to (over)preallocate the local matrix in all three
   supported formats (AIJ, BAIJ, SBAIJ); only the call matching the actual
   local matrix type takes effect.
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total row count at the roots = diagonal + off-diagonal contributions */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local-size/block-size queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a row cannot have more nonzeros than the local number of columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* reuse leafdata in place: per-block estimate for BAIJ (first row of each block) */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* presumably subtracting the block-row index discounts the strictly lower
     triangular blocks not stored by SBAIJ — TODO confirm this cannot go negative */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
568 PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
569 {
570   Mat_IS          *matis = (Mat_IS*)(A->data);
571   PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
572   const PetscInt  *global_indices_r,*global_indices_c;
573   PetscInt        i,j,bs,rows,cols;
574   PetscInt        lrows,lcols;
575   PetscInt        local_rows,local_cols;
576   PetscMPIInt     nsubdomains;
577   PetscBool       isdense,issbaij;
578   PetscErrorCode  ierr;
579 
580   PetscFunctionBegin;
581   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
582   ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
583   ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
584   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
585   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
586   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
587   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
588   if (A->rmap->mapping != A->cmap->mapping) {
589     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
590   } else {
591     global_indices_c = global_indices_r;
592   }
593 
594   if (issbaij) {
595     ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
596   }
597   /*
598      An SF reduce is needed to sum up properly on shared rows.
599      Note that generally preallocation is not exact, since it overestimates nonzeros
600   */
601   if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
602     ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
603   }
604   ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
605   ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
606   /* All processes need to compute entire row ownership */
607   ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
608   ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
609   for (i=0;i<nsubdomains;i++) {
610     for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
611       row_ownership[j] = i;
612     }
613   }
614   ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
615 
616   /*
617      my_dnz and my_onz contains exact contribution to preallocation from each local mat
618      then, they will be summed up properly. This way, preallocation is always sufficient
619   */
620   ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
621   /* preallocation as a MATAIJ */
622   if (isdense) { /* special case for dense local matrices */
623     for (i=0;i<local_rows;i++) {
624       PetscInt index_row = global_indices_r[i];
625       for (j=i;j<local_rows;j++) {
626         PetscInt owner = row_ownership[index_row];
627         PetscInt index_col = global_indices_c[j];
628         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
629           my_dnz[i] += 1;
630         } else { /* offdiag block */
631           my_onz[i] += 1;
632         }
633         /* same as before, interchanging rows and cols */
634         if (i != j) {
635           owner = row_ownership[index_col];
636           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
637             my_dnz[j] += 1;
638           } else {
639             my_onz[j] += 1;
640           }
641         }
642       }
643     }
644   } else if (matis->A->ops->getrowij) {
645     const PetscInt *ii,*jj,*jptr;
646     PetscBool      done;
647     ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
648     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
649     jptr = jj;
650     for (i=0;i<local_rows;i++) {
651       PetscInt index_row = global_indices_r[i];
652       for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
653         PetscInt owner = row_ownership[index_row];
654         PetscInt index_col = global_indices_c[*jptr];
655         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
656           my_dnz[i] += 1;
657         } else { /* offdiag block */
658           my_onz[i] += 1;
659         }
660         /* same as before, interchanging rows and cols */
661         if (issbaij && index_col != index_row) {
662           owner = row_ownership[index_col];
663           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
664             my_dnz[*jptr] += 1;
665           } else {
666             my_onz[*jptr] += 1;
667           }
668         }
669       }
670     }
671     ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
672     if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
673   } else { /* loop over rows and use MatGetRow */
674     for (i=0;i<local_rows;i++) {
675       const PetscInt *cols;
676       PetscInt       ncols,index_row = global_indices_r[i];
677       ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
678       for (j=0;j<ncols;j++) {
679         PetscInt owner = row_ownership[index_row];
680         PetscInt index_col = global_indices_c[cols[j]];
681         if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
682           my_dnz[i] += 1;
683         } else { /* offdiag block */
684           my_onz[i] += 1;
685         }
686         /* same as before, interchanging rows and cols */
687         if (issbaij && index_col != index_row) {
688           owner = row_ownership[index_col];
689           if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
690             my_dnz[cols[j]] += 1;
691           } else {
692             my_onz[cols[j]] += 1;
693           }
694         }
695       }
696       ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
697     }
698   }
699   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
700   if (global_indices_c != global_indices_r) {
701     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
702   }
703   ierr = PetscFree(row_ownership);CHKERRQ(ierr);
704 
705   /* Reduce my_dnz and my_onz */
706   if (maxreduce) {
707     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
708     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
709     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
710     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
711   } else {
712     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
713     ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
714     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
715     ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
716   }
717   ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);
718 
719   /* Resize preallocation if overestimated */
720   for (i=0;i<lrows;i++) {
721     dnz[i] = PetscMin(dnz[i],lcols);
722     onz[i] = PetscMin(onz[i],cols-lcols);
723   }
724   /* set preallocation */
725   ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
726   for (i=0;i<lrows/bs;i++) {
727     dnz[i] = dnz[i*bs]/bs;
728     onz[i] = onz[i*bs]/bs;
729   }
730   ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
731   ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
732   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
733   if (issbaij) {
734     ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
735   }
736   PetscFunctionReturn(0);
737 }
738 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/* Assemble the globally unassembled MATIS into a parallel XAIJ matrix.
   With MAT_INITIAL_MATRIX a new matrix *M is created (SBAIJ if every
   process holds a SEQSBAIJ local matrix, else BAIJ for bs>1, else AIJ);
   with MAT_REUSE_MATRIX the layout of *M is checked and its entries are
   zeroed, then the local contributions are added with ADD_VALUES. */
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) {
    /* uniprocessor case: the local matrix already is the assembled matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      /* NOTE(review): assumes *M keeps the nonzero pattern of matis->A */
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  /* the value-insertion strategy below depends on the local matrix type */
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle: convert so full rows are visible */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    /* extra reference so the unconditional MatDestroy below is safe */
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index sets 0..local_rows-1 / 0..local_cols-1, shared when square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* dense array is column oriented: switch insertion mode accordingly */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) {
    /* fast path: insert one CSR row at a time straight from the AIJ storage */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  /* destroy of the (possibly converted) local matrix overlaps the assembly */
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) {
    /* restore default row orientation changed above */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
872 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* when reusing, *newmat must be a valid matrix distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
907 
908 #undef __FUNCT__
909 #define __FUNCT__ "MatDuplicate_IS"
910 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
911 {
912   PetscErrorCode ierr;
913   Mat_IS         *matis = (Mat_IS*)(mat->data);
914   PetscInt       bs,m,n,M,N;
915   Mat            B,localmat;
916 
917   PetscFunctionBegin;
918   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
919   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
920   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
921   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
922   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
923   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
924   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
925   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
926   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
927   *newmat = B;
928   PetscFunctionReturn(0);
929 }
930 
931 #undef __FUNCT__
932 #define __FUNCT__ "MatIsHermitian_IS"
933 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
934 {
935   PetscErrorCode ierr;
936   Mat_IS         *matis = (Mat_IS*)A->data;
937   PetscBool      local_sym;
938 
939   PetscFunctionBegin;
940   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
941   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
942   PetscFunctionReturn(0);
943 }
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "MatIsSymmetric_IS"
947 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
948 {
949   PetscErrorCode ierr;
950   Mat_IS         *matis = (Mat_IS*)A->data;
951   PetscBool      local_sym;
952 
953   PetscFunctionBegin;
954   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
955   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
956   PetscFunctionReturn(0);
957 }
958 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroy all MATIS-private data: local matrix, scatters, work vectors,
   star forests and their buffers, then detach the composed methods. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* csf may alias sf (and its buffers); free it separately only when distinct */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* remove the type-specific methods composed on the object */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
989 
990 #undef __FUNCT__
991 #define __FUNCT__ "MatMult_IS"
992 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
993 {
994   PetscErrorCode ierr;
995   Mat_IS         *is  = (Mat_IS*)A->data;
996   PetscScalar    zero = 0.0;
997 
998   PetscFunctionBegin;
999   /*  scatter the global vector x into the local work vector */
1000   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1001   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1002 
1003   /* multiply the local matrix */
1004   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
1005 
1006   /* scatter product back into global memory */
1007   ierr = VecSet(y,zero);CHKERRQ(ierr);
1008   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1009   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "MatMultAdd_IS"
1015 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1016 {
1017   Vec            temp_vec;
1018   PetscErrorCode ierr;
1019 
1020   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
1021   if (v3 != v2) {
1022     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
1023     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1024   } else {
1025     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1026     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
1027     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1028     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1029     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1030   }
1031   PetscFunctionReturn(0);
1032 }
1033 
1034 #undef __FUNCT__
1035 #define __FUNCT__ "MatMultTranspose_IS"
1036 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
1037 {
1038   Mat_IS         *is = (Mat_IS*)A->data;
1039   PetscErrorCode ierr;
1040 
1041   PetscFunctionBegin;
1042   /*  scatter the global vector x into the local work vector */
1043   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1044   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1045 
1046   /* multiply the local matrix */
1047   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
1048 
1049   /* scatter product back into global vector */
1050   ierr = VecSet(x,0);CHKERRQ(ierr);
1051   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1052   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1053   PetscFunctionReturn(0);
1054 }
1055 
1056 #undef __FUNCT__
1057 #define __FUNCT__ "MatMultTransposeAdd_IS"
1058 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
1059 {
1060   Vec            temp_vec;
1061   PetscErrorCode ierr;
1062 
1063   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
1064   if (v3 != v2) {
1065     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
1066     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
1067   } else {
1068     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
1069     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
1070     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
1071     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
1072     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
1073   }
1074   PetscFunctionReturn(0);
1075 }
1076 
1077 #undef __FUNCT__
1078 #define __FUNCT__ "MatView_IS"
1079 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
1080 {
1081   Mat_IS         *a = (Mat_IS*)A->data;
1082   PetscErrorCode ierr;
1083   PetscViewer    sviewer;
1084 
1085   PetscFunctionBegin;
1086   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1087   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
1088   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
1089   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
1090   PetscFunctionReturn(0);
1091 }
1092 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* Install the row/column local-to-global mappings: rebuilds the local
   matrix is->A (sized by the mappings), the global-to-local scatters
   rctx/cctx, the local work vectors, and the multiplicity counter. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential nr x nc matrix sized by the mappings */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* expose the local matrix to the options database under the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters: local stride 0..nr-1 maps to the
       global indices produced by the row mapping */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      /* distinct column scatter only when the mappings differ */
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): scatter ones forward and back so each
       local entry ends up holding the number of subdomains sharing that dof */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1172 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/* Insert values given GLOBAL indices: map them to local numbering via the
   row/column mappings (IS_GTOLM_MASK marks unmappable indices negative,
   which MatSetValues then ignores) and forward to the local matrix.
   At most MATIS_MAX_ENTRIES_INSERTION rows/cols per call (stack buffers). */
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1203 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/* Blocked variant of MatSetValues_IS: map GLOBAL block indices to local
   block numbering (unmappable indices become negative and are skipped)
   and forward to the local matrix. Same stack-buffer limit applies. */
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before) */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1234 
1235 #undef __FUNCT__
1236 #define __FUNCT__ "MatSetValuesLocal_IS"
1237 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1238 {
1239   PetscErrorCode ierr;
1240   Mat_IS         *is = (Mat_IS*)A->data;
1241 
1242   PetscFunctionBegin;
1243   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1244   PetscFunctionReturn(0);
1245 }
1246 
1247 #undef __FUNCT__
1248 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1249 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1250 {
1251   PetscErrorCode ierr;
1252   Mat_IS         *is = (Mat_IS*)A->data;
1253 
1254   PetscFunctionBegin;
1255   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1256   PetscFunctionReturn(0);
1257 }
1258 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zero the given LOCAL rows (and optionally columns) of the subdomain
   matrix, placing diag on the diagonal. The diagonal value is divided by
   is->counter (the number of subdomains sharing each dof, built in
   MatSetLocalToGlobalMapping_IS) so that the sum over subdomains gives
   diag in the assembled operator. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    /* nothing zeroed on this subdomain: record it as a pure Neumann problem */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    /* zero with a zero diagonal first; the scaled diagonal is set below */
    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* divide by the dof multiplicity so contributions sum to diag */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1291 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Common driver for MatZeroRows_IS/MatZeroRowsColumns_IS: translate the
   GLOBAL row list into each subdomain's LOCAL rows (via the star forest)
   and delegate to MatISZeroRowsColumnsLocal_Private. Optionally fixes the
   right-hand side b from a known solution x. */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed: b_i = diag * x_i on the zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  /* mark owned zeroed rows in rootdata, broadcast to the subdomain leaves */
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress the marked leaves into a list of local row indices */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1341 
1342 #undef __FUNCT__
1343 #define __FUNCT__ "MatZeroRows_IS"
1344 static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1345 {
1346   PetscErrorCode ierr;
1347 
1348   PetscFunctionBegin;
1349   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
1350   PetscFunctionReturn(0);
1351 }
1352 
1353 #undef __FUNCT__
1354 #define __FUNCT__ "MatZeroRowsColumns_IS"
1355 static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
1356 {
1357   PetscErrorCode ierr;
1358 
1359   PetscFunctionBegin;
1360   ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
1361   PetscFunctionReturn(0);
1362 }
1363 
1364 #undef __FUNCT__
1365 #define __FUNCT__ "MatAssemblyBegin_IS"
1366 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1367 {
1368   Mat_IS         *is = (Mat_IS*)A->data;
1369   PetscErrorCode ierr;
1370 
1371   PetscFunctionBegin;
1372   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 #undef __FUNCT__
1377 #define __FUNCT__ "MatAssemblyEnd_IS"
1378 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1379 {
1380   Mat_IS         *is = (Mat_IS*)A->data;
1381   PetscErrorCode ierr;
1382 
1383   PetscFunctionBegin;
1384   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1385   PetscFunctionReturn(0);
1386 }
1387 
1388 #undef __FUNCT__
1389 #define __FUNCT__ "MatISGetLocalMat_IS"
1390 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1391 {
1392   Mat_IS *is = (Mat_IS*)mat->data;
1393 
1394   PetscFunctionBegin;
1395   *local = is->A;
1396   PetscFunctionReturn(0);
1397 }
1398 
1399 #undef __FUNCT__
1400 #define __FUNCT__ "MatISGetLocalMat"
1401 /*@
1402     MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.
1403 
1404   Input Parameter:
1405 .  mat - the matrix
1406 
1407   Output Parameter:
1408 .  local - the local matrix
1409 
1410   Level: advanced
1411 
1412   Notes:
1413     This can be called if you have precomputed the nonzero structure of the
1414   matrix and want to provide it to the inner matrix object to improve the performance
1415   of the MatSetValues() operation.
1416 
1417 .seealso: MATIS
1418 @*/
1419 PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
1420 {
1421   PetscErrorCode ierr;
1422 
1423   PetscFunctionBegin;
1424   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1425   PetscValidPointer(local,2);
1426   ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
1427   PetscFunctionReturn(0);
1428 }
1429 
1430 #undef __FUNCT__
1431 #define __FUNCT__ "MatISSetLocalMat_IS"
1432 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1433 {
1434   Mat_IS         *is = (Mat_IS*)mat->data;
1435   PetscInt       nrows,ncols,orows,ocols;
1436   PetscErrorCode ierr;
1437 
1438   PetscFunctionBegin;
1439   if (is->A) {
1440     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1441     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1442     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1443   }
1444   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1445   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1446   is->A = local;
1447   PetscFunctionReturn(0);
1448 }
1449 
1450 #undef __FUNCT__
1451 #define __FUNCT__ "MatISSetLocalMat"
1452 /*@
1453     MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.
1454 
1455   Input Parameter:
1456 .  mat - the matrix
1457 .  local - the local matrix
1458 
1459   Output Parameter:
1460 
1461   Level: advanced
1462 
1463   Notes:
1464     This can be called if you have precomputed the local matrix and
1465   want to provide it to the matrix object MATIS.
1466 
1467 .seealso: MATIS
1468 @*/
1469 PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
1470 {
1471   PetscErrorCode ierr;
1472 
1473   PetscFunctionBegin;
1474   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1475   PetscValidHeaderSpecific(local,MAT_CLASSID,2);
1476   ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
1477   PetscFunctionReturn(0);
1478 }
1479 
1480 #undef __FUNCT__
1481 #define __FUNCT__ "MatZeroEntries_IS"
1482 static PetscErrorCode MatZeroEntries_IS(Mat A)
1483 {
1484   Mat_IS         *a = (Mat_IS*)A->data;
1485   PetscErrorCode ierr;
1486 
1487   PetscFunctionBegin;
1488   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1489   PetscFunctionReturn(0);
1490 }
1491 
1492 #undef __FUNCT__
1493 #define __FUNCT__ "MatScale_IS"
1494 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1495 {
1496   Mat_IS         *is = (Mat_IS*)A->data;
1497   PetscErrorCode ierr;
1498 
1499   PetscFunctionBegin;
1500   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1501   PetscFunctionReturn(0);
1502 }
1503 
1504 #undef __FUNCT__
1505 #define __FUNCT__ "MatGetDiagonal_IS"
1506 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1507 {
1508   Mat_IS         *is = (Mat_IS*)A->data;
1509   PetscErrorCode ierr;
1510 
1511   PetscFunctionBegin;
1512   /* get diagonal of the local matrix */
1513   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1514 
1515   /* scatter diagonal back into global vector */
1516   ierr = VecSet(v,0);CHKERRQ(ierr);
1517   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1518   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1519   PetscFunctionReturn(0);
1520 }
1521 
1522 #undef __FUNCT__
1523 #define __FUNCT__ "MatSetOption_IS"
1524 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1525 {
1526   Mat_IS         *a = (Mat_IS*)A->data;
1527   PetscErrorCode ierr;
1528 
1529   PetscFunctionBegin;
1530   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1531   PetscFunctionReturn(0);
1532 }
1533 
1534 #undef __FUNCT__
1535 #define __FUNCT__ "MatAXPY_IS"
1536 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1537 {
1538   Mat_IS         *y = (Mat_IS*)Y->data;
1539   Mat_IS         *x;
1540 #if defined(PETSC_USE_DEBUG)
1541   PetscBool      ismatis;
1542 #endif
1543   PetscErrorCode ierr;
1544 
1545   PetscFunctionBegin;
1546 #if defined(PETSC_USE_DEBUG)
1547   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1548   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1549 #endif
1550   x = (Mat_IS*)X->data;
1551   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1552   PetscFunctionReturn(0);
1553 }
1554 
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalSubMatrix_IS"
/* Builds a MATIS "local reference" submatrix for the rows/cols of A selected
   (in local numbering) by row and col.  The result shares A's local matrix
   and only supports local set-values and assembly operations: its ops table
   is zeroed and repopulated with just those entries at the end. */
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
{
  Mat                    lA;
  Mat_IS                 *matis;
  ISLocalToGlobalMapping rl2g,cl2g;
  IS                     is;
  const PetscInt         *rg,*rl;
  PetscInt               nrg;
  PetscInt               N,M,nrl,i,*idxs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* NOTE(review): rg is fetched and restored but its values are never read
     in this function — looks like a leftover; confirm before removing */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
  ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
#endif
  /* idxs has one slot per local row of A; the first nrl slots map the
     submatrix rows, the rest are padded with an out-of-range/negative value */
  ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
  /* map from [0,nrl) to row */
  for (i=0;i<nrl;i++) idxs[i] = rl[i];
#if defined(PETSC_USE_DEBUG)
  for (i=nrl;i<nrg;i++) idxs[i] = nrg;
#else
  for (i=nrl;i<nrg;i++) idxs[i] = -1;
#endif
  ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
  /* PETSC_OWN_POINTER: the IS takes ownership of idxs, no free here */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  /* compute new l2g map for columns */
  if (col != row || A->rmap->mapping != A->cmap->mapping) {
    const PetscInt *cg,*cl;
    PetscInt       ncg;
    PetscInt       ncl;

    /* NOTE(review): cg, like rg above, is fetched and restored but unused */
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
    ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
    for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
#endif
    ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
    /* map from [0,ncl) to col */
    for (i=0;i<ncl;i++) idxs[i] = cl[i];
#if defined(PETSC_USE_DEBUG)
    for (i=ncl;i<ncg;i++) idxs[i] = ncg;
#else
    for (i=ncl;i<ncg;i++) idxs[i] = -1;
#endif
    ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
  } else {
    /* rows and columns use the same selection and the same mapping: share it */
    ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
    cl2g = rl2g;
  }
  /* create the MATIS submatrix */
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
  ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
  matis = (Mat_IS*)((*submat)->data);
  /* mark the submatrix as a local reference (it borrows A's local matrix) */
  matis->islocalref = PETSC_TRUE;
  ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
  ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
  ierr = MatSetUp(*submat);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
  /* remove unsupported ops */
  ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(0);
}
1643 
1644 #undef __FUNCT__
1645 #define __FUNCT__ "MatCreateIS"
1646 /*@
1647     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1648        process but not across processes.
1649 
1650    Input Parameters:
1651 +     comm    - MPI communicator that will share the matrix
1652 .     bs      - block size of the matrix
1653 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1654 .     rmap    - local to global map for rows
1655 -     cmap    - local to global map for cols
1656 
1657    Output Parameter:
1658 .    A - the resulting matrix
1659 
1660    Level: advanced
1661 
1662    Notes: See MATIS for more details.
1663           m and n are NOT related to the size of the map; they represent the size of the local parts of the vectors
1664           used in MatMult operations. The sizes of rmap and cmap define the size of the local matrices.
1665           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1666 
1667 .seealso: MATIS, MatSetLocalToGlobalMapping()
1668 @*/
1669 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1670 {
1671   PetscErrorCode ierr;
1672 
1673   PetscFunctionBegin;
1674   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mappings");
1675   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1676   if (bs > 0) {
1677     ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1678   }
1679   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1680   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1681   if (rmap && cmap) {
1682     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1683   } else if (!rmap) {
1684     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1685   } else {
1686     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1687   }
1688   PetscFunctionReturn(0);
1689 }
1690 
1691 /*MC
1692    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1693    This stores the matrices in globally unassembled form. Each processor
1694    assembles only its local Neumann problem and the parallel matrix vector
1695    product is handled "implicitly".
1696 
1697    Operations Provided:
1698 +  MatMult()
1699 .  MatMultAdd()
1700 .  MatMultTranspose()
1701 .  MatMultTransposeAdd()
1702 .  MatZeroEntries()
1703 .  MatSetOption()
1704 .  MatZeroRows()
1705 .  MatSetValues()
1706 .  MatSetValuesBlocked()
1707 .  MatSetValuesLocal()
1708 .  MatSetValuesBlockedLocal()
1709 .  MatScale()
1710 .  MatGetDiagonal()
1711 .  MatMissingDiagonal()
1712 .  MatDuplicate()
1713 .  MatCopy()
1714 .  MatAXPY()
1715 .  MatGetSubMatrix()
1716 .  MatGetLocalSubMatrix()
1717 .  MatTranspose()
1718 -  MatSetLocalToGlobalMapping()
1719 
1720    Options Database Keys:
1721 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1722 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1724 
1725           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1726 
1727           You can do matrix preallocation on the local matrix after you obtain it with
1728           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1729 
1730   Level: advanced
1731 
1732 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1733 
1734 M*/
1735 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Type constructor for MATIS: allocates the Mat_IS context, installs the
   function table, composes the MATIS-specific methods, and sets the type
   name.  The Mat_IS fields themselves are filled in later (e.g. by
   MatSetLocalToGlobalMapping_IS / MatSetUp). */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* PetscNewLog zero-initializes the context, so unset fields are NULL/0 */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;

  /* special MATIS functions (dispatched via PetscUseMethod by the public API) */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1789