xref: /petsc/src/mat/impls/is/matis.c (revision fc6138e5bb0ddea085ccbafdbba92de7843135df)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 
16 #undef __FUNCT__
17 #define __FUNCT__ "MatDiagonalSet_IS"
18 PetscErrorCode  MatDiagonalSet_IS(Mat A,Vec D,InsertMode insmode)
19 {
20   Mat_IS         *is = (Mat_IS*)A->data;
21   PetscErrorCode ierr;
22 
23   PetscFunctionBegin;
24   if (!D) { /* this code branch is used by MatShift_IS */
25     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
26   } else {
27     ierr = VecScatterBegin(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
28     ierr = VecScatterEnd(is->rctx,D,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
29   }
30   ierr = VecPointwiseDivide(is->y,is->y,is->counter);CHKERRQ(ierr);
31   ierr = MatDiagonalSet(is->A,is->y,insmode);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "MatShift_IS"
37 PetscErrorCode  MatShift_IS(Mat A,PetscScalar a)
38 {
39   PetscErrorCode ierr;
40 
41   PetscFunctionBegin;
42   ierr = MatDiagonalSet_IS(A,NULL,ADD_VALUES);CHKERRQ(ierr);
43   PetscFunctionReturn(0);
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
48 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
49 {
50   PetscErrorCode ierr;
51   Mat_IS         *is = (Mat_IS*)A->data;
52   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
53 
54   PetscFunctionBegin;
55 #if defined(PETSC_USE_DEBUG)
56   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
57 #endif
58   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
59   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
60   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
61   PetscFunctionReturn(0);
62 }
63 
64 #undef __FUNCT__
65 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
66 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
67 {
68   PetscErrorCode ierr;
69   Mat_IS         *is = (Mat_IS*)A->data;
70   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
71 
72   PetscFunctionBegin;
73 #if defined(PETSC_USE_DEBUG)
74   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
75 #endif
76   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
77   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
78   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
79   PetscFunctionReturn(0);
80 }
81 
#undef __FUNCT__
#define __FUNCT__ "PetscLayoutMapLocal_Private"
/*
   Given N global indices idxs listed on this process (with respect to the
   layout map), determine which of the layout entries owned by this process
   appear somewhere in the distributed collection of the idxs arrays.

   Output (each optional, pass NULL to skip):
     on     - number of locally owned layout entries that are referenced
     oidxs  - newly allocated array of the local positions (in [0,map->n)) of
              those entries; the caller takes ownership and must free it
     ogidxs - newly allocated array giving, for each of those entries, its
              global position in the concatenation of all idxs arrays over
              the communicator; the caller takes ownership and must free it
*/
static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
{
  PetscInt      *owners = map->range;
  PetscInt       n      = map->n;
  PetscSF        sf;
  PetscInt      *lidxs,*work = NULL;
  PetscSFNode   *ridxs;
  PetscMPIInt    rank;
  PetscInt       r, p = 0, len = 0;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
  ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
  /* -1 marks "not referenced by any process"; referenced roots become >= 0
     after the logical-or reduction below */
  for (r = 0; r < n; ++r) lidxs[r] = -1;
  ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
  for (r = 0; r < N; ++r) {
    const PetscInt idx = idxs[r];
    if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
    if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
      ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
    }
    ridxs[r].rank = p;
    ridxs[r].index = idxs[r] - owners[p]; /* position local to the owning rank */
  }
  ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* logical-or reduce: roots that receive at least one leaf contribution end
     up nonnegative (logically true), untouched roots keep the initial -1 */
  ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
  if (ogidxs) { /* communicate global idxs */
    PetscInt cum = 0,start,*work2;

    ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
    ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
    /* exclusive prefix sum over processes: 'start' is the global offset of
       this process' (nonnegative) entries in the concatenated idxs arrays */
    for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
    ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
    start -= cum;
    cum = 0;
    for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
    /* push each entry's global position to its owner */
    ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
    ierr = PetscFree(work2);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* Compress and put in indices */
  for (r = 0; r < n; ++r)
    if (lidxs[r] >= 0) {
      if (work) work[len] = work[r]; /* keep global positions aligned with the compressed local list */
      lidxs[len++] = r;
    }
  if (on) *on = len;
  if (oidxs) *oidxs = lidxs;
  if (ogidxs) *ogidxs = work;
  PetscFunctionReturn(0);
}
140 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/*
   MatGetSubMatrix implementation for MATIS: extracts the submatrix selected
   by the (duplicate-free, globally numbered) index sets irow/icol into a new
   MATIS matrix. The corresponding submatrix of the local matrix is extracted
   with MatGetSubMatrix, and the local index sets used for that extraction
   are stashed in the new matrix (getsub_ris/getsub_cis) so the operation can
   be repeated with scall == MAT_REUSE_MATRIX.
*/
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* reuse requires a MATIS that already carries the stashed local ISes */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count the multiplicity of every index by adding 1 for each occurrence
     into a vector: any entry counted more than once is an error */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  /* same multiplicity check for the column index set */
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark each selected owned row with its position in the submatrix
       numbering; the +1 shift lets 0 flag unselected rows */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    /* broadcast the marks to the local (unassembled) numbering */
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    /* lidxs: positions in the old local matrix; newgidxs: global rows of the
       new submatrix (undo the +1 shift) */
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1;
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* rows and columns select the same entries: reuse the row mapping/IS */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* same marking/broadcast scheme as for the rows, using the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the stashed local index sets */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
292 
293 #undef __FUNCT__
294 #define __FUNCT__ "MatCopy_IS"
295 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
296 {
297   Mat_IS         *a = (Mat_IS*)A->data,*b;
298   PetscBool      ismatis;
299   PetscErrorCode ierr;
300 
301   PetscFunctionBegin;
302   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
303   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
304   b = (Mat_IS*)B->data;
305   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
306   PetscFunctionReturn(0);
307 }
308 
309 #undef __FUNCT__
310 #define __FUNCT__ "MatMissingDiagonal_IS"
311 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
312 {
313   Vec               v;
314   const PetscScalar *array;
315   PetscInt          i,n;
316   PetscErrorCode    ierr;
317 
318   PetscFunctionBegin;
319   *missing = PETSC_FALSE;
320   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
321   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
322   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
323   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
324   for (i=0;i<n;i++) if (array[i] == 0.) break;
325   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
326   ierr = VecDestroy(&v);CHKERRQ(ierr);
327   if (i != n) *missing = PETSC_TRUE;
328   if (d) {
329     *d = -1;
330     if (*missing) {
331       PetscInt rstart;
332       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
333       *d = i+rstart;
334     }
335   }
336   PetscFunctionReturn(0);
337 }
338 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/*
   Builds the star forests that connect the global (assembled) numbering of B
   to the local (unassembled) numbering of its local matrix: leaves are the
   rows (resp. columns) of matis->A, roots are the locally owned global rows
   (resp. columns) of B, with the connectivity given by the row (resp.
   column) local-to-global mapping. Also allocates the persistent
   rootdata/leafdata work arrays used by the preallocation and submatrix
   routines. When the row and column mappings are the same object, the column
   SF and its work arrays simply alias the row ones (no extra allocation);
   the teardown code elsewhere must honor this aliasing.
*/
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* row SF: one leaf per local-matrix row, roots laid out by B->rmap */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* same mapping for rows and columns: alias the row SF and its buffers */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
372 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (no-op for non-MATIS) */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
420 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/*
   MATIS implementation of MatISSetPreallocation: the per-row estimates given
   in the assembled (global) numbering are summed (d + o parts), broadcast to
   the local (unassembled) numbering through the row SF, and used to
   preallocate the local matrix. The same leafdata buffer is then rewritten
   in place for the blocked and symmetric-blocked preallocation calls; only
   the call matching the actual type of matis->A takes effect (the others
   are no-ops on that type).
*/
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* total estimate per owned row = diagonal part + off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* broadcast the estimates to the local numbering; overlap the
     communication with the local size/blocksize queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* a local row cannot have more nonzeros than the local matrix has columns */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* rewrite leafdata in place as per-block-row estimates (first row of each
     block divided by bs) for the BAIJ preallocation */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* NOTE(review): subtracting the block-row index appears to discount the
     (lower-triangular) blocks not stored by SBAIJ -- rough estimate only */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
462 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/*
   Computes a preallocation for the assembled (MPIAIJ/MPIBAIJ/MPISBAIJ)
   matrix B equivalent to the MATIS matrix A. Each process counts the exact
   diagonal/off-diagonal nonzeros contributed by its own local matrix
   (my_dnz/my_onz, in the local numbering) and the counts are then combined
   on the owning processes through the row SF, either summed (safe
   overestimate) or max-reduced depending on 'maxreduce'. The result is
   generally an overestimate, since different subdomains may contribute the
   same nonzero.
*/
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  /* column mapping may alias the row mapping; track that to restore once */
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on, mat_ranges holds the COLUMN ownership ranges, used to
     decide whether a column falls in the diagonal block of a row's owner */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* every entry is nonzero: count the upper triangle and mirror each
       off-diagonal entry to its transposed position */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: CSR structure available */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ stores only the upper triangle: also count the mirrored entry */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation: the blocked counts are rewritten in place over the
     scalar ones; only the call matching B's actual type takes effect */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
635 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ_IS"
/*
   Assembles the globally unassembled MATIS matrix 'mat' into the parallel
   matrix *M. The output type is chosen from the local matrix types: SBAIJ
   when every process holds a SeqSBAIJ local matrix, otherwise BAIJ (bs>1)
   or AIJ. With MAT_REUSE_MATRIX, *M is checked for matching sizes and block
   size and zeroed before the subdomain contributions are re-added.
*/
static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
{
  Mat_IS         *matis = (Mat_IS*)(mat->data);
  Mat            local_mat;
  /* info on mat */
  PetscInt       bs,rows,cols,lrows,lcols;
  PetscInt       local_rows,local_cols;
  PetscBool      isdense,issbaij,isseqaij;
  PetscMPIInt    nsubdomains;
  /* values insertion */
  PetscScalar    *array;
  /* work */
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get info from mat */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
  if (nsubdomains == 1) { /* uniprocessor case: the local matrix already is the global matrix */
    if (reuse == MAT_INITIAL_MATRIX) {
      ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
    } else {
      ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }
  ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
  ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatType     new_mat_type;
    PetscBool   issbaij_red;

    /* determining new matrix type: SBAIJ only if ALL processes have a SeqSBAIJ local matrix */
    ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
    if (issbaij_red) {
      new_mat_type = MATSBAIJ;
    } else {
      if (bs>1) {
        new_mat_type = MATBAIJ;
      } else {
        new_mat_type = MATAIJ;
      }
    }

    ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
    ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
    ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
    ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
    ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
  } else {
    PetscInt mbs,mrows,mcols,mlrows,mlcols;
    /* some checks: the reused matrix must match mat in all global/local sizes and block size */
    ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
    ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
    ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
    if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
    if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
    if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
    if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
    if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
    ierr = MatZeroEntries(*M);CHKERRQ(ierr);
  }

  if (issbaij) { /* SeqSBAIJ stores the upper triangle only: convert to BAIJ to traverse full rows */
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
    local_mat = matis->A;
  }

  /* Set values */
  ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
  if (isdense) { /* special case for dense local matrices */
    PetscInt i,*dummy_rows,*dummy_cols;

    /* identity index arrays covering the whole local matrix; share one array when square */
    if (local_rows != local_cols) {
      ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      for (i=0;i<local_cols;i++) dummy_cols[i] = i;
    } else {
      ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
      for (i=0;i<local_rows;i++) dummy_rows[i] = i;
      dummy_cols = dummy_rows;
    }
    /* the dense array is column-major: insert in column-oriented mode */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
    ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
    if (dummy_rows != dummy_cols) {
      ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
    } else {
      ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
    }
  } else if (isseqaij) { /* SeqAIJ: insert row-by-row straight from the CSR arrays */
    PetscInt  i,nvtxs,*xadj,*adjncy;
    PetscBool done;

    ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
    for (i=0;i<nvtxs;i++) {
      ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
    }
    ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
    ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i=0;i<local_rows;i++) {
      PetscInt       j;
      const PetscInt *local_indices_cols;

      ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
      ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
      ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  /* the (possibly converted) local matrix is no longer needed while assembly completes */
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (isdense) { /* restore the default row-oriented insertion mode */
    ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
769 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  if (reuse != MAT_INITIAL_MATRIX) {
    /* reusing requires a valid matrix, on the same communicator, distinct from mat */
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
804 
805 #undef __FUNCT__
806 #define __FUNCT__ "MatDuplicate_IS"
807 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
808 {
809   PetscErrorCode ierr;
810   Mat_IS         *matis = (Mat_IS*)(mat->data);
811   PetscInt       bs,m,n,M,N;
812   Mat            B,localmat;
813 
814   PetscFunctionBegin;
815   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
816   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
817   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
818   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
819   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
820   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
821   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
822   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
823   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
824   *newmat = B;
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "MatIsHermitian_IS"
830 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
831 {
832   PetscErrorCode ierr;
833   Mat_IS         *matis = (Mat_IS*)A->data;
834   PetscBool      local_sym;
835 
836   PetscFunctionBegin;
837   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
838   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
839   PetscFunctionReturn(0);
840 }
841 
842 #undef __FUNCT__
843 #define __FUNCT__ "MatIsSymmetric_IS"
844 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
845 {
846   PetscErrorCode ierr;
847   Mat_IS         *matis = (Mat_IS*)A->data;
848   PetscBool      local_sym;
849 
850   PetscFunctionBegin;
851   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
852   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
853   PetscFunctionReturn(0);
854 }
855 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Destroys a MATIS matrix: releases the local matrix, the scatters and
   work vectors, the submatrix index sets, the star forests with their
   root/leaf work arrays, and un-registers the composed methods. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = VecDestroy(&b->counter);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  /* csf may alias sf (square case): destroy it separately only when distinct,
     to avoid a double free of the SF and its work arrays */
  if (b->sf != b->csf) {
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
886 
887 #undef __FUNCT__
888 #define __FUNCT__ "MatMult_IS"
889 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
890 {
891   PetscErrorCode ierr;
892   Mat_IS         *is  = (Mat_IS*)A->data;
893   PetscScalar    zero = 0.0;
894 
895   PetscFunctionBegin;
896   /*  scatter the global vector x into the local work vector */
897   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
898   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
899 
900   /* multiply the local matrix */
901   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
902 
903   /* scatter product back into global memory */
904   ierr = VecSet(y,zero);CHKERRQ(ierr);
905   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
906   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
907   PetscFunctionReturn(0);
908 }
909 
910 #undef __FUNCT__
911 #define __FUNCT__ "MatMultAdd_IS"
912 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
913 {
914   Vec            temp_vec;
915   PetscErrorCode ierr;
916 
917   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
918   if (v3 != v2) {
919     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
920     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
921   } else {
922     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
923     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
924     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
925     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
926     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
927   }
928   PetscFunctionReturn(0);
929 }
930 
931 #undef __FUNCT__
932 #define __FUNCT__ "MatMultTranspose_IS"
933 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
934 {
935   Mat_IS         *is = (Mat_IS*)A->data;
936   PetscErrorCode ierr;
937 
938   PetscFunctionBegin;
939   /*  scatter the global vector x into the local work vector */
940   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
941   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
942 
943   /* multiply the local matrix */
944   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
945 
946   /* scatter product back into global vector */
947   ierr = VecSet(x,0);CHKERRQ(ierr);
948   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
949   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
950   PetscFunctionReturn(0);
951 }
952 
953 #undef __FUNCT__
954 #define __FUNCT__ "MatMultTransposeAdd_IS"
955 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
956 {
957   Vec            temp_vec;
958   PetscErrorCode ierr;
959 
960   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
961   if (v3 != v2) {
962     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
963     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
964   } else {
965     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
966     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
967     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
968     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
969     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
970   }
971   PetscFunctionReturn(0);
972 }
973 
974 #undef __FUNCT__
975 #define __FUNCT__ "MatView_IS"
976 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
977 {
978   Mat_IS         *a = (Mat_IS*)A->data;
979   PetscErrorCode ierr;
980   PetscViewer    sviewer;
981 
982   PetscFunctionBegin;
983   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
984   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
985   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
986   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
987   PetscFunctionReturn(0);
988 }
989 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/* (Re)initializes a MATIS matrix from its row/column local-to-global
   mappings: destroys any previous local data, creates the sequential
   local matrix sized by the mappings, and (unless this is a local
   reference matrix) builds the global-to-local scatters, local work
   vectors and the interface multiplicity counter used by MatDiagonalSet. */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  /* NOTE(review): is->csf and its work arrays are not reset here, only is->sf —
     if a column SF was built earlier it appears to persist; confirm intended */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecDestroy(&is->counter);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A: a sequential matrix sized by the mappings */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix is configurable from the options database under the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters: local stride 0..n-1 <- mapped global indices */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else {
      /* square/symmetric mapping case: share the row scatter for columns */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }

    /* interface counter vector (local): scatter-add ones from every subdomain,
       then gather back, so counter[i] = number of subdomains sharing local dof i */
    ierr = VecDuplicate(is->y,&is->counter);CHKERRQ(ierr);
    ierr = VecSet(is->y,1.);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,is->y,rglobal,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterBegin(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(is->rctx,rglobal,is->counter,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    /* free workspace */
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1069 
1070 #undef __FUNCT__
1071 #define __FUNCT__ "MatSetValues_IS"
1072 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1073 {
1074   Mat_IS         *is = (Mat_IS*)mat->data;
1075   PetscErrorCode ierr;
1076 #if defined(PETSC_USE_DEBUG)
1077   PetscInt       i,zm,zn;
1078 #endif
1079   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1080 
1081   PetscFunctionBegin;
1082 #if defined(PETSC_USE_DEBUG)
1083   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1084   /* count negative indices */
1085   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1086   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1087 #endif
1088   ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1089   ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1090 #if defined(PETSC_USE_DEBUG)
1091   /* count negative indices (should be the same as before) */
1092   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1093   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1094   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1095   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1096 #endif
1097   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1098   PetscFunctionReturn(0);
1099 }
1100 
1101 #undef __FUNCT__
1102 #define __FUNCT__ "MatSetValuesBlocked_IS"
1103 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1104 {
1105   Mat_IS         *is = (Mat_IS*)mat->data;
1106   PetscErrorCode ierr;
1107 #if defined(PETSC_USE_DEBUG)
1108   PetscInt       i,zm,zn;
1109 #endif
1110   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
1111 
1112   PetscFunctionBegin;
1113 #if defined(PETSC_USE_DEBUG)
1114   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
1115   /* count negative indices */
1116   for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
1117   for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
1118 #endif
1119   ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
1120   ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
1121 #if defined(PETSC_USE_DEBUG)
1122   /* count negative indices (should be the same as before) */
1123   for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
1124   for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
1125   if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
1126   if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
1127 #endif
1128   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
1129   PetscFunctionReturn(0);
1130 }
1131 
1132 #undef __FUNCT__
1133 #define __FUNCT__ "MatSetValuesLocal_IS"
1134 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1135 {
1136   PetscErrorCode ierr;
1137   Mat_IS         *is = (Mat_IS*)A->data;
1138 
1139   PetscFunctionBegin;
1140   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1141   PetscFunctionReturn(0);
1142 }
1143 
1144 #undef __FUNCT__
1145 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1146 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1147 {
1148   PetscErrorCode ierr;
1149   Mat_IS         *is = (Mat_IS*)A->data;
1150 
1151   PetscFunctionBegin;
1152   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1153   PetscFunctionReturn(0);
1154 }
1155 
#undef __FUNCT__
#define __FUNCT__ "MatISZeroRowsColumnsLocal_Private"
/* Zeroes the given LOCAL rows (and optionally columns) of the subdomain
   matrix and sets the diagonal. Since a zeroed row may be shared by several
   subdomains, the requested diagonal value is divided by the multiplicity
   stored in is->counter so that the assembled global diagonal sums to diag. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,PetscBool columns)
{
  Mat_IS         *is = (Mat_IS*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!n) {
    /* no local rows to zero: flag the subdomain problem as pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    /* zero with a zero diagonal first; the scaled diagonal is set below */
    if (columns) {
      ierr = MatZeroRowsColumns(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    } else {
      ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
    }
    if (diag != 0.) {
      const PetscScalar *array;
      ierr = VecGetArrayRead(is->counter,&array);CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        /* counter[row] = number of subdomains sharing this dof */
        ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
      }
      ierr = VecRestoreArrayRead(is->counter,&array);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1188 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_Private_IS"
/* Common implementation for MatZeroRows_IS/MatZeroRowsColumns_IS: maps the
   GLOBAL rows to the locally owned ones, optionally fixes the right hand
   side b = diag*x on those rows, broadcasts the zeroed-row flags through
   the star forest so every subdomain touching a row learns about it, and
   finally zeroes the corresponding LOCAL rows/columns. */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b,PetscBool columns)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  /* zeroing columns, a nonzero diagonal, or the x/b fixup all require
     matching row and column layouts */
  if (columns || diag != 0. || (x && b)) {
    PetscBool cong;
    ierr = PetscLayoutCompare(A->rmap,A->cmap,&cong);CHKERRQ(ierr);
    if (!cong && columns) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Columns can be zeroed if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && diag != 0.) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent for MATIS");
    if (!cong && x && b) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"A->rmap and A->cmap need to be congruent");
  }
#endif
  /* get locally owned rows */
  ierr = PetscLayoutMapLocal_Private(A->rmap,n,rows,&len,&lrows,NULL);CHKERRQ(ierr);
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  /* mark owned zeroed rows in rootdata and broadcast to the leaves, i.e.
     to every subdomain whose local matrix contains the row */
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* collect the flagged LOCAL row indices and zero them on the subdomain matrix */
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsColumnsLocal_Private(A,nr,lrows,diag,columns);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1238 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/* Zeroes the given global rows (columns untouched); delegates to the
   common rows/columns implementation with columns = PETSC_FALSE. */
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_FALSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1249 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRowsColumns_IS"
/* Zeroes the given global rows AND columns; delegates to the common
   rows/columns implementation with columns = PETSC_TRUE. */
static PetscErrorCode MatZeroRowsColumns_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroRowsColumns_Private_IS(A,n,rows,diag,x,b,PETSC_TRUE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1260 
1261 #undef __FUNCT__
1262 #define __FUNCT__ "MatAssemblyBegin_IS"
1263 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1264 {
1265   Mat_IS         *is = (Mat_IS*)A->data;
1266   PetscErrorCode ierr;
1267 
1268   PetscFunctionBegin;
1269   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1270   PetscFunctionReturn(0);
1271 }
1272 
1273 #undef __FUNCT__
1274 #define __FUNCT__ "MatAssemblyEnd_IS"
1275 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1276 {
1277   Mat_IS         *is = (Mat_IS*)A->data;
1278   PetscErrorCode ierr;
1279 
1280   PetscFunctionBegin;
1281   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1282   PetscFunctionReturn(0);
1283 }
1284 
1285 #undef __FUNCT__
1286 #define __FUNCT__ "MatISGetLocalMat_IS"
1287 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1288 {
1289   Mat_IS *is = (Mat_IS*)mat->data;
1290 
1291   PetscFunctionBegin;
1292   *local = is->A;
1293   PetscFunctionReturn(0);
1294 }
1295 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    The returned matrix is a borrowed reference; its reference count is not
  increased, so it must not be destroyed by the caller.
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1326 
1327 #undef __FUNCT__
1328 #define __FUNCT__ "MatISSetLocalMat_IS"
1329 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1330 {
1331   Mat_IS         *is = (Mat_IS*)mat->data;
1332   PetscInt       nrows,ncols,orows,ocols;
1333   PetscErrorCode ierr;
1334 
1335   PetscFunctionBegin;
1336   if (is->A) {
1337     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1338     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1339     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %Dx%D (you passed a %Dx%D matrix)",orows,ocols,nrows,ncols);
1340   }
1341   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1342   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1343   is->A = local;
1344   PetscFunctionReturn(0);
1345 }
1346 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS. The reference count of
  local is increased, so the caller retains ownership of its reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1376 
1377 #undef __FUNCT__
1378 #define __FUNCT__ "MatZeroEntries_IS"
1379 static PetscErrorCode MatZeroEntries_IS(Mat A)
1380 {
1381   Mat_IS         *a = (Mat_IS*)A->data;
1382   PetscErrorCode ierr;
1383 
1384   PetscFunctionBegin;
1385   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1386   PetscFunctionReturn(0);
1387 }
1388 
1389 #undef __FUNCT__
1390 #define __FUNCT__ "MatScale_IS"
1391 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1392 {
1393   Mat_IS         *is = (Mat_IS*)A->data;
1394   PetscErrorCode ierr;
1395 
1396   PetscFunctionBegin;
1397   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1398   PetscFunctionReturn(0);
1399 }
1400 
1401 #undef __FUNCT__
1402 #define __FUNCT__ "MatGetDiagonal_IS"
1403 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1404 {
1405   Mat_IS         *is = (Mat_IS*)A->data;
1406   PetscErrorCode ierr;
1407 
1408   PetscFunctionBegin;
1409   /* get diagonal of the local matrix */
1410   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1411 
1412   /* scatter diagonal back into global vector */
1413   ierr = VecSet(v,0);CHKERRQ(ierr);
1414   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1415   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1416   PetscFunctionReturn(0);
1417 }
1418 
1419 #undef __FUNCT__
1420 #define __FUNCT__ "MatSetOption_IS"
1421 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1422 {
1423   Mat_IS         *a = (Mat_IS*)A->data;
1424   PetscErrorCode ierr;
1425 
1426   PetscFunctionBegin;
1427   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 #undef __FUNCT__
1432 #define __FUNCT__ "MatAXPY_IS"
1433 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1434 {
1435   Mat_IS         *y = (Mat_IS*)Y->data;
1436   Mat_IS         *x;
1437 #if defined(PETSC_USE_DEBUG)
1438   PetscBool      ismatis;
1439 #endif
1440   PetscErrorCode ierr;
1441 
1442   PetscFunctionBegin;
1443 #if defined(PETSC_USE_DEBUG)
1444   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1445   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1446 #endif
1447   x = (Mat_IS*)X->data;
1448   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 #undef __FUNCT__
1453 #define __FUNCT__ "MatGetLocalSubMatrix_IS"
1454 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
1455 {
1456   Mat                    lA;
1457   Mat_IS                 *matis;
1458   ISLocalToGlobalMapping rl2g,cl2g;
1459   IS                     is;
1460   const PetscInt         *rg,*rl;
1461   PetscInt               nrg;
1462   PetscInt               N,M,nrl,i,*idxs;
1463   PetscErrorCode         ierr;
1464 
1465   PetscFunctionBegin;
1466   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
1467   ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
1468   ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
1469   ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
1470 #if defined(PETSC_USE_DEBUG)
1471   for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %D -> %D greater than maximum possible %D",i,rl[i],nrg);
1472 #endif
1473   ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
1474   /* map from [0,nrl) to row */
1475   for (i=0;i<nrl;i++) idxs[i] = rl[i];
1476 #if defined(PETSC_USE_DEBUG)
1477   for (i=nrl;i<nrg;i++) idxs[i] = nrg;
1478 #else
1479   for (i=nrl;i<nrg;i++) idxs[i] = -1;
1480 #endif
1481   ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
1482   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
1483   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
1484   ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
1485   ierr = ISDestroy(&is);CHKERRQ(ierr);
1486   /* compute new l2g map for columns */
1487   if (col != row || A->rmap->mapping != A->cmap->mapping) {
1488     const PetscInt *cg,*cl;
1489     PetscInt       ncg;
1490     PetscInt       ncl;
1491 
1492     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
1493     ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
1494     ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
1495     ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
1496 #if defined(PETSC_USE_DEBUG)
1497     for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %D -> %D greater than maximum possible %D",i,cl[i],ncg);
1498 #endif
1499     ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
1500     /* map from [0,ncl) to col */
1501     for (i=0;i<ncl;i++) idxs[i] = cl[i];
1502 #if defined(PETSC_USE_DEBUG)
1503     for (i=ncl;i<ncg;i++) idxs[i] = ncg;
1504 #else
1505     for (i=ncl;i<ncg;i++) idxs[i] = -1;
1506 #endif
1507     ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
1508     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
1509     ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
1510     ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
1511     ierr = ISDestroy(&is);CHKERRQ(ierr);
1512   } else {
1513     ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
1514     cl2g = rl2g;
1515   }
1516   /* create the MATIS submatrix */
1517   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
1518   ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
1519   ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1520   ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
1521   matis = (Mat_IS*)((*submat)->data);
1522   matis->islocalref = PETSC_TRUE;
1523   ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
1524   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
1525   ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
1526   ierr = MatSetUp(*submat);CHKERRQ(ierr);
1527   ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1528   ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1529   ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
1530   ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
1531   /* remove unsupported ops */
1532   ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1533   (*submat)->ops->destroy               = MatDestroy_IS;
1534   (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
1535   (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
1536   (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
1537   (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
1538   PetscFunctionReturn(0);
1539 }
1540 
1541 #undef __FUNCT__
1542 #define __FUNCT__ "MatCreateIS"
1543 /*@
1544     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1545        process but not across processes.
1546 
1547    Input Parameters:
1548 +     comm    - MPI communicator that will share the matrix
1549 .     bs      - block size of the matrix
1550 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1551 .     rmap    - local to global map for rows
1552 -     cmap    - local to global map for cols
1553 
1554    Output Parameter:
1555 .    A - the resulting matrix
1556 
1557    Level: advanced
1558 
1559    Notes: See MATIS for more details.
1560           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1561           by that process. The sizes of rmap and cmap define the size of the local matrices.
1562           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1563 
1564 .seealso: MATIS, MatSetLocalToGlobalMapping()
1565 @*/
1566 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1567 {
1568   PetscErrorCode ierr;
1569 
1570   PetscFunctionBegin;
1571   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1572   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1573   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1574   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1575   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1576   if (rmap && cmap) {
1577     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1578   } else if (!rmap) {
1579     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1580   } else {
1581     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1582   }
1583   PetscFunctionReturn(0);
1584 }
1585 
1586 /*MC
1587    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1588    This stores the matrices in globally unassembled form. Each processor
1589    assembles only its local Neumann problem and the parallel matrix vector
1590    product is handled "implicitly".
1591 
1592    Operations Provided:
1593 +  MatMult()
1594 .  MatMultAdd()
1595 .  MatMultTranspose()
1596 .  MatMultTransposeAdd()
1597 .  MatZeroEntries()
1598 .  MatSetOption()
1599 .  MatZeroRows()
1600 .  MatSetValues()
1601 .  MatSetValuesBlocked()
1602 .  MatSetValuesLocal()
1603 .  MatSetValuesBlockedLocal()
1604 .  MatScale()
1605 .  MatGetDiagonal()
1606 .  MatMissingDiagonal()
1607 .  MatDuplicate()
1608 .  MatCopy()
1609 .  MatAXPY()
1610 .  MatGetSubMatrix()
1611 .  MatGetLocalSubMatrix()
1612 -  MatSetLocalToGlobalMapping()
1613 
1614    Options Database Keys:
1615 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1616 
   Notes: The options prefix for the inner matrix is given by -is_mat_xxx
1618 
1619           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1620 
1621           You can do matrix preallocation on the local matrix after you obtain it with
1622           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1623 
1624   Level: advanced
1625 
1626 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1627 
1628 M*/
1629 
#undef __FUNCT__
#define __FUNCT__ "MatCreate_IS"
/* Constructor for the MATIS matrix type: allocates the private data and
   installs the operation table and the type-specific composed functions. */
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b;

  PetscFunctionBegin;
  /* allocate and attach the MATIS private data */
  ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
  A->data = (void*)b;

  /* matrix ops */
  ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->getsubmatrix            = MatGetSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;

  /* special MATIS functions, reachable through PetscUseMethod/PetscTryMethod */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1681