xref: /petsc/src/mat/impls/is/matis.c (revision bb1015c3d26b4b687e2446ad76998fe3eecfe9bd)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <../src/mat/impls/is/matis.h>      /*I "petscmat.h" I*/
11 
12 #define MATIS_MAX_ENTRIES_INSERTION 2048
13 
14 static PetscErrorCode MatISComputeSF_Private(Mat);
15 static PetscErrorCode MatISZeroRowsLocal_Private(Mat,PetscInt,const PetscInt[],PetscScalar);
16 
17 #undef __FUNCT__
18 #define __FUNCT__ "MatSetValuesLocal_SubMat_IS"
19 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
20 {
21   PetscErrorCode ierr;
22   Mat_IS         *is = (Mat_IS*)A->data;
23   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
24 
25   PetscFunctionBegin;
26 #if defined(PETSC_USE_DEBUG)
27   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
28 #endif
29   ierr = ISLocalToGlobalMappingApply(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
30   ierr = ISLocalToGlobalMappingApply(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
31   ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
32   PetscFunctionReturn(0);
33 }
34 
35 #undef __FUNCT__
36 #define __FUNCT__ "MatSetValuesBlockedLocal_SubMat_IS"
37 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
38 {
39   PetscErrorCode ierr;
40   Mat_IS         *is = (Mat_IS*)A->data;
41   PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];
42 
43   PetscFunctionBegin;
44 #if defined(PETSC_USE_DEBUG)
45   if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
46 #endif
47   ierr = ISLocalToGlobalMappingApplyBlock(A->rmap->mapping,m,rows,rows_l);CHKERRQ(ierr);
48   ierr = ISLocalToGlobalMappingApplyBlock(A->cmap->mapping,n,cols,cols_l);CHKERRQ(ierr);
49   ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
50   PetscFunctionReturn(0);
51 }
52 
53 #undef __FUNCT__
54 #define __FUNCT__ "PetscLayoutMapLocal_Private"
55 static PetscErrorCode PetscLayoutMapLocal_Private(PetscLayout map,PetscInt N,const PetscInt idxs[],const PetscInt gidxs[], PetscInt *on,PetscInt **oidxs,PetscInt **ogidxs)
56 {
57   PetscInt      *owners = map->range;
58   PetscInt       n      = map->n;
59   PetscSF        sf;
60   PetscInt      *lidxs,*work = NULL;
61   PetscSFNode   *ridxs;
62   PetscMPIInt    rank;
63   PetscInt       r, p = 0, len = 0;
64   PetscErrorCode ierr;
65 
66   PetscFunctionBegin;
67   /* Create SF where leaves are input idxs and roots are owned idxs (code adapted from MatZeroRowsMapLocal_Private) */
68   ierr = MPI_Comm_rank(map->comm,&rank);CHKERRQ(ierr);
69   ierr = PetscMalloc1(n,&lidxs);CHKERRQ(ierr);
70   for (r = 0; r < n; ++r) lidxs[r] = -1;
71   ierr = PetscMalloc1(N,&ridxs);CHKERRQ(ierr);
72   for (r = 0; r < N; ++r) {
73     const PetscInt idx = idxs[r];
74     if (idx < 0 || map->N <= idx) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D out of range [0,%D)",idx,map->N);
75     if (idx < owners[p] || owners[p+1] <= idx) { /* short-circuit the search if the last p owns this idx too */
76       ierr = PetscLayoutFindOwner(map,idx,&p);CHKERRQ(ierr);
77     }
78     ridxs[r].rank = p;
79     ridxs[r].index = idxs[r] - owners[p];
80   }
81   ierr = PetscSFCreate(map->comm,&sf);CHKERRQ(ierr);
82   ierr = PetscSFSetGraph(sf,n,N,NULL,PETSC_OWN_POINTER,ridxs,PETSC_OWN_POINTER);CHKERRQ(ierr);
83   ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
84   ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)idxs,lidxs,MPI_LOR);CHKERRQ(ierr);
85   if ((gidxs && ogidxs) || ogidxs) {
86     ierr = PetscMalloc1(n,&work);CHKERRQ(ierr);
87     if (!gidxs) { /* if gidxs is not provided, global indices are inferred */
88       PetscInt cum = 0,start,*work2;
89       ierr = PetscCalloc1(N,&work2);CHKERRQ(ierr);
90       for (r = 0; r < N; ++r) if (idxs[r] >=0) cum++;
91       ierr = MPI_Scan(&cum,&start,1,MPIU_INT,MPI_SUM,map->comm);CHKERRQ(ierr);
92       start -= cum;
93       cum = 0;
94       for (r = 0; r < N; ++r) if (idxs[r] >=0) work2[r] = start+cum++;
95       ierr = PetscSFReduceBegin(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
96       ierr = PetscSFReduceEnd(sf,MPIU_INT,work2,work,MPIU_REPLACE);CHKERRQ(ierr);
97       ierr = PetscFree(work2);CHKERRQ(ierr);
98     } else {
99       ierr = PetscSFReduceBegin(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
100       ierr = PetscSFReduceEnd(sf,MPIU_INT,(PetscInt*)gidxs,work,MPIU_REPLACE);CHKERRQ(ierr);
101     }
102   }
103   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
104   /* Compress and put in indices */
105   for (r = 0; r < n; ++r)
106     if (lidxs[r] >= 0) {
107       if (work) work[len] = work[r];
108       lidxs[len++] = r;
109     }
110   if (on) *on = len;
111   if (oidxs) *oidxs = lidxs;
112   if (ogidxs) *ogidxs = work;
113   PetscFunctionReturn(0);
114 }
115 
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_IS"
/* Extracts the parallel submatrix of a MATIS matrix given by the index sets
   irow/icol, keeping the result in unassembled MATIS form. The local index
   sets needed to extract the corresponding local submatrix are cached on the
   new matrix (getsub_ris/getsub_cis) so that MAT_REUSE_MATRIX calls can skip
   the setup phase. irow and icol must not contain duplicate entries
   (verified in debug builds via a vector-assembly counting test). */
static PetscErrorCode MatGetSubMatrix_IS(Mat mat,IS irow,IS icol,MatReuse scall,Mat *newmat)
{
  Mat               locmat,newlocmat;
  Mat_IS            *newmatis;
#if defined(PETSC_USE_DEBUG)
  Vec               rtest,ltest;
  const PetscScalar *array;
#endif
  const PetscInt    *idxs;
  PetscInt          i,m,n;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  if (scall == MAT_REUSE_MATRIX) {
    PetscBool ismatis;

    /* reuse requires a MATIS created by a previous MAT_INITIAL_MATRIX call,
       carrying the cached local row/column index sets */
    ierr = PetscObjectTypeCompare((PetscObject)*newmat,MATIS,&ismatis);CHKERRQ(ierr);
    if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Not of MATIS type");
    newmatis = (Mat_IS*)(*newmat)->data;
    if (!newmatis->getsub_ris) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local row IS");
    if (!newmatis->getsub_cis) SETERRQ(PetscObjectComm((PetscObject)*newmat),PETSC_ERR_ARG_WRONG,"Cannot reuse matrix! Misses local col IS");
  }
  /* irow and icol may not have duplicate entries */
#if defined(PETSC_USE_DEBUG)
  /* count index occurrences by assembling 1.0 with ADD_VALUES into a vector:
     any entry > 1 reveals a duplicate */
  ierr = MatCreateVecs(mat,&ltest,&rtest);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irow,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(rtest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(rtest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(rtest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(rtest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(rtest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(rtest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Irow may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(rtest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
  /* same duplicate check for icol on the right-layout vector */
  ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
  for (i=0;i<n;i++) {
    ierr = VecSetValue(ltest,idxs[i],1.0,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(ltest);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(ltest);CHKERRQ(ierr);
  ierr = VecGetLocalSize(ltest,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(ltest,&m,NULL);CHKERRQ(ierr);
  ierr = VecGetArrayRead(ltest,&array);CHKERRQ(ierr);
  for (i=0;i<n;i++) if (array[i] != 0. && array[i] != 1.) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"Index %D counted %D times! Icol may not have duplicate entries",i+m,(PetscInt)PetscRealPart(array[i]));
  ierr = VecRestoreArrayRead(ltest,&array);CHKERRQ(ierr);
  ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
  ierr = VecDestroy(&rtest);CHKERRQ(ierr);
  ierr = VecDestroy(&ltest);CHKERRQ(ierr);
#endif
  if (scall == MAT_INITIAL_MATRIX) {
    Mat_IS                 *matis = (Mat_IS*)mat->data;
    ISLocalToGlobalMapping rl2g;
    IS                     is;
    PetscInt               *lidxs,*lgidxs,*newgidxs;
    PetscInt               ll,newloc;
    MPI_Comm               comm;

    ierr = PetscObjectGetComm((PetscObject)mat,&comm);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow,&m);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol,&n);CHKERRQ(ierr);
    ierr = MatCreate(comm,newmat);CHKERRQ(ierr);
    ierr = MatSetType(*newmat,MATIS);CHKERRQ(ierr);
    ierr = MatSetSizes(*newmat,m,n,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
    /* communicate irow to their owners in the layout */
    ierr = ISGetIndices(irow,&idxs);CHKERRQ(ierr);
    ierr = PetscLayoutMapLocal_Private(mat->rmap,m,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow,&idxs);CHKERRQ(ierr);
    if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
      ierr = MatISComputeSF_Private(mat);CHKERRQ(ierr);
    }
    /* mark selected rows in rootdata (shifted by +1 so 0 means "not selected"),
       then broadcast the marks to the leaves (local numbering of matis->A) */
    ierr = PetscMemzero(matis->sf_rootdata,matis->sf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<ll;i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i]+1;
    ierr = PetscFree(lidxs);CHKERRQ(ierr);
    ierr = PetscFree(lgidxs);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
    /* collect the selected local rows (lidxs) and their new global ids (newgidxs) */
    for (i=0,newloc=0;i<matis->sf_nleaves;i++) if (matis->sf_leafdata[i]) newloc++;
    ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
    ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
    for (i=0,newloc=0;i<matis->sf_nleaves;i++)
      if (matis->sf_leafdata[i]) {
        lidxs[newloc] = i;
        newgidxs[newloc++] = matis->sf_leafdata[i]-1; /* undo the +1 shift */
      }
    ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    /* local is to extract local submatrix */
    newmatis = (Mat_IS*)(*newmat)->data;
    ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_ris);CHKERRQ(ierr);
    if (mat->congruentlayouts == -1) { /* first time we compare rows and cols layouts */
      PetscBool cong;
      ierr = PetscLayoutCompare(mat->rmap,mat->cmap,&cong);CHKERRQ(ierr);
      if (cong) mat->congruentlayouts = 1;
      else      mat->congruentlayouts = 0;
    }
    if (mat->congruentlayouts && irow == icol) {
      /* symmetric extraction: share the row mapping and the cached local IS */
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,rl2g);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)newmatis->getsub_ris);CHKERRQ(ierr);
      newmatis->getsub_cis = newmatis->getsub_ris;
    } else {
      ISLocalToGlobalMapping cl2g;

      /* communicate icol to their owners in the layout */
      ierr = ISGetIndices(icol,&idxs);CHKERRQ(ierr);
      ierr = PetscLayoutMapLocal_Private(mat->cmap,n,idxs,NULL,&ll,&lidxs,&lgidxs);CHKERRQ(ierr);
      ierr = ISRestoreIndices(icol,&idxs);CHKERRQ(ierr);
      /* repeat the mark-and-broadcast procedure on the column SF */
      ierr = PetscMemzero(matis->csf_rootdata,matis->csf_nroots*sizeof(PetscInt));CHKERRQ(ierr);
      for (i=0;i<ll;i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i]+1;
      ierr = PetscFree(lidxs);CHKERRQ(ierr);
      ierr = PetscFree(lgidxs);CHKERRQ(ierr);
      ierr = PetscSFBcastBegin(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      ierr = PetscSFBcastEnd(matis->csf,MPIU_INT,matis->csf_rootdata,matis->csf_leafdata);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++) if (matis->csf_leafdata[i]) newloc++;
      ierr = PetscMalloc1(newloc,&newgidxs);CHKERRQ(ierr);
      ierr = PetscMalloc1(newloc,&lidxs);CHKERRQ(ierr);
      for (i=0,newloc=0;i<matis->csf_nleaves;i++)
        if (matis->csf_leafdata[i]) {
          lidxs[newloc] = i;
          newgidxs[newloc++] = matis->csf_leafdata[i]-1;
        }
      ierr = ISCreateGeneral(comm,newloc,newgidxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      /* local is to extract local submatrix */
      ierr = ISCreateGeneral(comm,newloc,lidxs,PETSC_OWN_POINTER,&newmatis->getsub_cis);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(*newmat,rl2g,cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
    }
    ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
  } else {
    ierr = MatISGetLocalMat(*newmat,&newlocmat);CHKERRQ(ierr);
  }
  /* extract the local submatrix using the cached local index sets */
  ierr = MatISGetLocalMat(mat,&locmat);CHKERRQ(ierr);
  newmatis = (Mat_IS*)(*newmat)->data;
  ierr = MatGetSubMatrix(locmat,newmatis->getsub_ris,newmatis->getsub_cis,scall,&newlocmat);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX) {
    /* MatISSetLocalMat takes its own reference */
    ierr = MatISSetLocalMat(*newmat,newlocmat);CHKERRQ(ierr);
    ierr = MatDestroy(&newlocmat);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
267 
268 #undef __FUNCT__
269 #define __FUNCT__ "MatCopy_IS"
270 static PetscErrorCode MatCopy_IS(Mat A,Mat B,MatStructure str)
271 {
272   Mat_IS         *a = (Mat_IS*)A->data,*b;
273   PetscBool      ismatis;
274   PetscErrorCode ierr;
275 
276   PetscFunctionBegin;
277   ierr = PetscObjectTypeCompare((PetscObject)B,MATIS,&ismatis);CHKERRQ(ierr);
278   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"Need to be implemented");
279   b = (Mat_IS*)B->data;
280   ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
281   PetscFunctionReturn(0);
282 }
283 
284 #undef __FUNCT__
285 #define __FUNCT__ "MatMissingDiagonal_IS"
286 static PetscErrorCode MatMissingDiagonal_IS(Mat A,PetscBool  *missing,PetscInt *d)
287 {
288   Vec               v;
289   const PetscScalar *array;
290   PetscInt          i,n;
291   PetscErrorCode    ierr;
292 
293   PetscFunctionBegin;
294   *missing = PETSC_FALSE;
295   ierr = MatCreateVecs(A,NULL,&v);CHKERRQ(ierr);
296   ierr = MatGetDiagonal(A,v);CHKERRQ(ierr);
297   ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
298   ierr = VecGetArrayRead(v,&array);CHKERRQ(ierr);
299   for (i=0;i<n;i++) if (array[i] == 0.) break;
300   ierr = VecRestoreArrayRead(v,&array);CHKERRQ(ierr);
301   ierr = VecDestroy(&v);CHKERRQ(ierr);
302   if (i != n) *missing = PETSC_TRUE;
303   if (d) {
304     *d = -1;
305     if (*missing) {
306       PetscInt rstart;
307       ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
308       *d = i+rstart;
309     }
310   }
311   PetscFunctionReturn(0);
312 }
313 
#undef __FUNCT__
#define __FUNCT__ "MatISComputeSF_Private"
/* Builds the PetscSF used to exchange data between the owned (root) degrees
   of freedom of B and the local (leaf) degrees of freedom of the sequential
   matrix matis->A, and allocates the matching root/leaf work buffers.
   A separate column SF is built only when B has distinct row and column
   mappings; otherwise the column SF members alias the row SF ones (the
   destroy path must therefore free them only once). */
static PetscErrorCode MatISComputeSF_Private(Mat B)
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  const PetscInt *gidxs;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* leaves: all local rows of matis->A; roots: owned rows of B */
  ierr = MatGetSize(matis->A,&matis->sf_nleaves,NULL);CHKERRQ(ierr);
  ierr = MatGetLocalSize(B,&matis->sf_nroots,NULL);CHKERRQ(ierr);
  ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->sf);CHKERRQ(ierr);
  /* the local-to-global row mapping gives the global index of each leaf */
  ierr = ISLocalToGlobalMappingGetIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(matis->sf,B->rmap,matis->sf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreIndices(B->rmap->mapping,&gidxs);CHKERRQ(ierr);
  ierr = PetscMalloc2(matis->sf_nroots,&matis->sf_rootdata,matis->sf_nleaves,&matis->sf_leafdata);CHKERRQ(ierr);
  if (B->rmap->mapping != B->cmap->mapping) { /* setup SF for columns */
    ierr = MatGetSize(matis->A,NULL,&matis->csf_nleaves);CHKERRQ(ierr);
    ierr = MatGetLocalSize(B,NULL,&matis->csf_nroots);CHKERRQ(ierr);
    ierr = PetscSFCreate(PetscObjectComm((PetscObject)B),&matis->csf);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscSFSetGraphLayout(matis->csf,B->cmap,matis->csf_nleaves,NULL,PETSC_OWN_POINTER,gidxs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingRestoreIndices(B->cmap->mapping,&gidxs);CHKERRQ(ierr);
    ierr = PetscMalloc2(matis->csf_nroots,&matis->csf_rootdata,matis->csf_nleaves,&matis->csf_leafdata);CHKERRQ(ierr);
  } else {
    /* identical mappings: alias the row SF and its buffers (no extra allocation) */
    matis->csf = matis->sf;
    matis->csf_nleaves = matis->sf_nleaves;
    matis->csf_nroots = matis->sf_nroots;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(0);
}
347 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation"
/*@
   MatISSetPreallocation - Preallocates memory for a MATIS parallel matrix.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           For matrices that will be factored, you must leave room for (and set)
           the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   Level: intermediate

   Notes: This function has the same interface as the MPIAIJ preallocation routine in order to simplify the transition
          from the assembled format to the unassembled one. It overestimates the preallocation of MATIS local
          matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.

.keywords: matrix

.seealso: MatCreate(), MatCreateIS(), MatMPIAIJSetPreallocation(), MatISGetLocalMat(), MATIS
@*/
PetscErrorCode  MatISSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,1);
  PetscValidType(B,1);
  /* dispatch to the type-specific implementation (MatISSetPreallocation_IS);
     a no-op if the method is not composed on B */
  ierr = PetscTryMethod(B,"MatISSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
395 
#undef __FUNCT__
#define __FUNCT__ "MatISSetPreallocation_IS"
/* MATIS implementation of MatISSetPreallocation: sums the diagonal and
   off-diagonal estimates on the owned rows, broadcasts the totals to the
   local (leaf) rows via the matrix SF, and uses the result to preallocate
   the local matrix in AIJ, BAIJ and SBAIJ flavors (whichever matches the
   actual type of matis->A takes effect). The estimates are intentionally
   overestimates; see the MatISSetPreallocation man page. */
static PetscErrorCode  MatISSetPreallocation_IS(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_IS         *matis = (Mat_IS*)(B->data);
  PetscInt       bs,i,nlocalcols;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!matis->A) SETERRQ(PetscObjectComm((PetscObject)B),PETSC_ERR_SUP,"You should first call MatSetLocalToGlobalMapping");
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(B);CHKERRQ(ierr);
  }
  /* per owned row, total estimate = diagonal part + off-diagonal part */
  if (!d_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] = d_nnz[i];
  }
  if (!o_nnz) {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nz;
  } else {
    for (i=0;i<matis->sf_nroots;i++) matis->sf_rootdata[i] += o_nnz[i];
  }
  /* overlap the broadcast with the local queries */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,NULL,&nlocalcols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(matis->A,&bs);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  /* clip to the number of local columns: a local row cannot have more nonzeros */
  for (i=0;i<matis->sf_nleaves;i++) {
    matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i],nlocalcols);
  }
  ierr = MatSeqAIJSetPreallocation(matis->A,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* compress in place to block rows: take the scalar estimate of the first row
     of each block and convert it to a block count */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i*bs]/bs;
  }
  ierr = MatSeqBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  /* SBAIJ stores only the upper triangle: subtract i as a rough count of the
     block columns falling below the diagonal -- NOTE(review): heuristic, could
     underestimate to a negative value for sparse rows; confirm against callers */
  for (i=0;i<matis->sf_nleaves/bs;i++) {
    matis->sf_leafdata[i] = matis->sf_leafdata[i]-i;
  }
  ierr = MatSeqSBAIJSetPreallocation(matis->A,bs,0,matis->sf_leafdata);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
437 
#undef __FUNCT__
#define __FUNCT__ "MatISSetMPIXAIJPreallocation_Private"
/* Computes and sets the preallocation of the assembled matrix B from the
   unassembled MATIS matrix A. Each process counts, for every row of its
   local matrix, the contributions falling in the diagonal and off-diagonal
   blocks of the final parallel layout (my_dnz/my_onz); the counts are then
   combined across processes onto the owned rows through the matrix SF,
   either by MPI_MAX (maxreduce true, tighter but assumes identical local
   patterns on shared rows) or by MPI_SUM (safe overestimate). Preallocation
   is applied in AIJ, BAIJ and SBAIJ flavors; the one matching B's type
   takes effect. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS          *matis = (Mat_IS*)(A->data);
  PetscInt        *my_dnz,*my_onz,*dnz,*onz,*mat_ranges,*row_ownership;
  const PetscInt  *global_indices_r,*global_indices_c;
  PetscInt        i,j,bs,rows,cols;
  PetscInt        lrows,lcols;
  PetscInt        local_rows,local_cols;
  PetscMPIInt     nsubdomains;
  PetscBool       isdense,issbaij;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&nsubdomains);CHKERRQ(ierr);
  ierr = MatGetSize(A,&rows,&cols);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (A->rmap->mapping != A->cmap->mapping) {
    ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  } else {
    global_indices_c = global_indices_r;
  }

  if (issbaij) {
    /* SBAIJ stores only the upper triangle; expose full rows for counting */
    ierr = MatGetRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetLocalSize(A,&lrows,&lcols);CHKERRQ(ierr);
  ierr = MatPreallocateInitialize(PetscObjectComm((PetscObject)A),lrows,lcols,dnz,onz);CHKERRQ(ierr);
  /* All processes need to compute entire row ownership */
  ierr = PetscMalloc1(rows,&row_ownership);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);
  for (i=0;i<nsubdomains;i++) {
    for (j=mat_ranges[i];j<mat_ranges[i+1];j++) {
      row_ownership[j] = i;
    }
  }
  /* from here on mat_ranges holds the COLUMN ownership ranges, used for the
     diagonal/off-diagonal block classification below */
  ierr = MatGetOwnershipRangesColumn(A,(const PetscInt**)&mat_ranges);CHKERRQ(ierr);

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  ierr = PetscCalloc2(local_rows,&my_dnz,local_rows,&my_onz);CHKERRQ(ierr);
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    /* count only j>=i and mirror the contribution to (j,i), exploiting the
       full (dense) pattern to halve the traversal */
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=i;j<local_rows;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (i != j) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[j] += 1;
          } else {
            my_onz[j] += 1;
          }
        }
      }
    }
  } else if (matis->A->ops->getrowij) {
    /* fast path: walk the CSR structure of the local matrix directly */
    const PetscInt *ii,*jj,*jptr;
    PetscBool      done;
    ierr = MatGetRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
    jptr = jj;
    for (i=0;i<local_rows;i++) {
      PetscInt index_row = global_indices_r[i];
      for (j=0;j<ii[i+1]-ii[i];j++,jptr++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ rows expose only the upper triangle: account for the
             symmetric (col,row) entry as well */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    ierr = MatRestoreRowIJ(matis->A,0,PETSC_FALSE,PETSC_FALSE,&local_rows,&ii,&jj,&done);CHKERRQ(ierr);
    if (!done) SETERRQ1(PetscObjectComm((PetscObject)(matis->A)),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
  } else { /* loop over rows and use MatGetRow */
    for (i=0;i<local_rows;i++) {
      const PetscInt *cols;
      PetscInt       ncols,index_row = global_indices_r[i];
      ierr = MatGetRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
      for (j=0;j<ncols;j++) {
        PetscInt owner = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner]-1 && index_col < mat_ranges[owner+1] ) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner]-1 && index_row < mat_ranges[owner+1] ) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      ierr = MatRestoreRow(matis->A,i,&ncols,&cols,NULL);CHKERRQ(ierr);
    }
  }
  ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&global_indices_r);CHKERRQ(ierr);
  if (global_indices_c != global_indices_r) {
    ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&global_indices_c);CHKERRQ(ierr);
  }
  ierr = PetscFree(row_ownership);CHKERRQ(ierr);

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_MAX);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_MAX);CHKERRQ(ierr);
  } else {
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_dnz,dnz,MPI_SUM);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(matis->sf,MPIU_INT,my_onz,onz,MPI_SUM);CHKERRQ(ierr);
  }
  ierr = PetscFree2(my_dnz,my_onz);CHKERRQ(ierr);

  /* Resize preallocation if overestimated */
  for (i=0;i<lrows;i++) {
    dnz[i] = PetscMin(dnz[i],lcols);
    onz[i] = PetscMin(onz[i],cols-lcols);
  }
  /* set preallocation */
  ierr = MatMPIAIJSetPreallocation(B,0,dnz,0,onz);CHKERRQ(ierr);
  /* compress in place to per-block-row counts for the blocked formats */
  for (i=0;i<lrows/bs;i++) {
    dnz[i] = dnz[i*bs]/bs;
    onz[i] = onz[i*bs]/bs;
  }
  ierr = MatMPIBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(B,bs,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
  if (issbaij) {
    ierr = MatRestoreRowUpperTriangular(matis->A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
610 
611 #undef __FUNCT__
612 #define __FUNCT__ "MatISGetMPIXAIJ_IS"
613 static PetscErrorCode MatISGetMPIXAIJ_IS(Mat mat, MatReuse reuse, Mat *M)
614 {
615   Mat_IS         *matis = (Mat_IS*)(mat->data);
616   Mat            local_mat;
617   /* info on mat */
618   PetscInt       bs,rows,cols,lrows,lcols;
619   PetscInt       local_rows,local_cols;
620   PetscBool      isdense,issbaij,isseqaij;
621   PetscMPIInt    nsubdomains;
622   /* values insertion */
623   PetscScalar    *array;
624   /* work */
625   PetscErrorCode ierr;
626 
627   PetscFunctionBegin;
628   /* get info from mat */
629   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)mat),&nsubdomains);CHKERRQ(ierr);
630   if (nsubdomains == 1) {
631     if (reuse == MAT_INITIAL_MATRIX) {
632       ierr = MatDuplicate(matis->A,MAT_COPY_VALUES,&(*M));CHKERRQ(ierr);
633     } else {
634       ierr = MatCopy(matis->A,*M,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
635     }
636     PetscFunctionReturn(0);
637   }
638   ierr = MatGetSize(mat,&rows,&cols);CHKERRQ(ierr);
639   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
640   ierr = MatGetLocalSize(mat,&lrows,&lcols);CHKERRQ(ierr);
641   ierr = MatGetSize(matis->A,&local_rows,&local_cols);CHKERRQ(ierr);
642   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQDENSE,&isdense);CHKERRQ(ierr);
643   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQAIJ,&isseqaij);CHKERRQ(ierr);
644   ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
645 
646   if (reuse == MAT_INITIAL_MATRIX) {
647     MatType     new_mat_type;
648     PetscBool   issbaij_red;
649 
650     /* determining new matrix type */
651     ierr = MPIU_Allreduce(&issbaij,&issbaij_red,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)mat));CHKERRQ(ierr);
652     if (issbaij_red) {
653       new_mat_type = MATSBAIJ;
654     } else {
655       if (bs>1) {
656         new_mat_type = MATBAIJ;
657       } else {
658         new_mat_type = MATAIJ;
659       }
660     }
661 
662     ierr = MatCreate(PetscObjectComm((PetscObject)mat),M);CHKERRQ(ierr);
663     ierr = MatSetSizes(*M,lrows,lcols,rows,cols);CHKERRQ(ierr);
664     ierr = MatSetBlockSize(*M,bs);CHKERRQ(ierr);
665     ierr = MatSetType(*M,new_mat_type);CHKERRQ(ierr);
666     ierr = MatISSetMPIXAIJPreallocation_Private(mat,*M,PETSC_FALSE);CHKERRQ(ierr);
667   } else {
668     PetscInt mbs,mrows,mcols,mlrows,mlcols;
669     /* some checks */
670     ierr = MatGetBlockSize(*M,&mbs);CHKERRQ(ierr);
671     ierr = MatGetSize(*M,&mrows,&mcols);CHKERRQ(ierr);
672     ierr = MatGetLocalSize(*M,&mlrows,&mlcols);CHKERRQ(ierr);
673     if (mrows != rows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of rows (%d != %d)",rows,mrows);
674     if (mcols != cols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of cols (%d != %d)",cols,mcols);
675     if (mlrows != lrows) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local rows (%d != %d)",lrows,mlrows);
676     if (mlcols != lcols) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong number of local cols (%d != %d)",lcols,mlcols);
677     if (mbs != bs) SETERRQ2(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse matrix. Wrong block size (%d != %d)",bs,mbs);
678     ierr = MatZeroEntries(*M);CHKERRQ(ierr);
679   }
680 
681   if (issbaij) {
682     ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&local_mat);CHKERRQ(ierr);
683   } else {
684     ierr = PetscObjectReference((PetscObject)matis->A);CHKERRQ(ierr);
685     local_mat = matis->A;
686   }
687 
688   /* Set values */
689   ierr = MatSetLocalToGlobalMapping(*M,mat->rmap->mapping,mat->cmap->mapping);CHKERRQ(ierr);
690   if (isdense) { /* special case for dense local matrices */
691     PetscInt i,*dummy_rows,*dummy_cols;
692 
693     if (local_rows != local_cols) {
694       ierr = PetscMalloc2(local_rows,&dummy_rows,local_cols,&dummy_cols);CHKERRQ(ierr);
695       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
696       for (i=0;i<local_cols;i++) dummy_cols[i] = i;
697     } else {
698       ierr = PetscMalloc1(local_rows,&dummy_rows);CHKERRQ(ierr);
699       for (i=0;i<local_rows;i++) dummy_rows[i] = i;
700       dummy_cols = dummy_rows;
701     }
702     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
703     ierr = MatDenseGetArray(local_mat,&array);CHKERRQ(ierr);
704     ierr = MatSetValuesLocal(*M,local_rows,dummy_rows,local_cols,dummy_cols,array,ADD_VALUES);CHKERRQ(ierr);
705     ierr = MatDenseRestoreArray(local_mat,&array);CHKERRQ(ierr);
706     if (dummy_rows != dummy_cols) {
707       ierr = PetscFree2(dummy_rows,dummy_cols);CHKERRQ(ierr);
708     } else {
709       ierr = PetscFree(dummy_rows);CHKERRQ(ierr);
710     }
711   } else if (isseqaij) {
712     PetscInt  i,nvtxs,*xadj,*adjncy;
713     PetscBool done;
714 
715     ierr = MatGetRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
716     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatGetRowIJ called from %s\n",__FUNCT__);
717     ierr = MatSeqAIJGetArray(local_mat,&array);CHKERRQ(ierr);
718     for (i=0;i<nvtxs;i++) {
719       ierr = MatSetValuesLocal(*M,1,&i,xadj[i+1]-xadj[i],adjncy+xadj[i],array+xadj[i],ADD_VALUES);CHKERRQ(ierr);
720     }
721     ierr = MatRestoreRowIJ(local_mat,0,PETSC_FALSE,PETSC_FALSE,&nvtxs,(const PetscInt**)&xadj,(const PetscInt**)&adjncy,&done);CHKERRQ(ierr);
722     if (!done) SETERRQ1(PetscObjectComm((PetscObject)local_mat),PETSC_ERR_PLIB,"Error in MatRestoreRowIJ called from %s\n",__FUNCT__);
723     ierr = MatSeqAIJRestoreArray(local_mat,&array);CHKERRQ(ierr);
724   } else { /* very basic values insertion for all other matrix types */
725     PetscInt i;
726 
727     for (i=0;i<local_rows;i++) {
728       PetscInt       j;
729       const PetscInt *local_indices_cols;
730 
731       ierr = MatGetRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
732       ierr = MatSetValuesLocal(*M,1,&i,j,local_indices_cols,array,ADD_VALUES);CHKERRQ(ierr);
733       ierr = MatRestoreRow(local_mat,i,&j,&local_indices_cols,(const PetscScalar**)&array);CHKERRQ(ierr);
734     }
735   }
736   ierr = MatAssemblyBegin(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
737   ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
738   ierr = MatAssemblyEnd(*M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
739   if (isdense) {
740     ierr = MatSetOption(*M,MAT_ROW_ORIENTED,PETSC_TRUE);CHKERRQ(ierr);
741   }
742   PetscFunctionReturn(0);
743 }
744 
#undef __FUNCT__
#define __FUNCT__ "MatISGetMPIXAIJ"
/*@
    MatISGetMPIXAIJ - Converts MATIS matrix into a parallel AIJ format

  Input Parameters:
+  mat - the matrix (should be of type MATIS)
-  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

  Output Parameter:
.  newmat - the matrix in AIJ format

  Level: developer

  Notes: mat and *newmat cannot be the same object when MAT_REUSE_MATRIX is requested.

.seealso: MATIS
@*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveEnum(mat,reuse,2);
  PetscValidPointer(newmat,3);
  /* when reusing, *newmat must be a valid matrix distinct from mat, living on the same communicator */
  if (reuse != MAT_INITIAL_MATRIX) {
    PetscValidHeaderSpecific(*newmat,MAT_CLASSID,3);
    PetscCheckSameComm(mat,1,*newmat,3);
    if (mat == *newmat) SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Cannot reuse the same matrix");
  }
  /* dispatch to the implementation composed with the object (MatISGetMPIXAIJ_IS for MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetMPIXAIJ_C",(Mat,MatReuse,Mat*),(mat,reuse,newmat));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
779 
780 #undef __FUNCT__
781 #define __FUNCT__ "MatDuplicate_IS"
782 PetscErrorCode MatDuplicate_IS(Mat mat,MatDuplicateOption op,Mat *newmat)
783 {
784   PetscErrorCode ierr;
785   Mat_IS         *matis = (Mat_IS*)(mat->data);
786   PetscInt       bs,m,n,M,N;
787   Mat            B,localmat;
788 
789   PetscFunctionBegin;
790   ierr = MatGetBlockSize(mat,&bs);CHKERRQ(ierr);
791   ierr = MatGetSize(mat,&M,&N);CHKERRQ(ierr);
792   ierr = MatGetLocalSize(mat,&m,&n);CHKERRQ(ierr);
793   ierr = MatCreateIS(PetscObjectComm((PetscObject)mat),bs,m,n,M,N,mat->rmap->mapping,mat->cmap->mapping,&B);CHKERRQ(ierr);
794   ierr = MatDuplicate(matis->A,op,&localmat);CHKERRQ(ierr);
795   ierr = MatISSetLocalMat(B,localmat);CHKERRQ(ierr);
796   ierr = MatDestroy(&localmat);CHKERRQ(ierr);
797   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
798   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
799   *newmat = B;
800   PetscFunctionReturn(0);
801 }
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "MatIsHermitian_IS"
805 static PetscErrorCode MatIsHermitian_IS(Mat A,PetscReal tol,PetscBool  *flg)
806 {
807   PetscErrorCode ierr;
808   Mat_IS         *matis = (Mat_IS*)A->data;
809   PetscBool      local_sym;
810 
811   PetscFunctionBegin;
812   ierr = MatIsHermitian(matis->A,tol,&local_sym);CHKERRQ(ierr);
813   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
814   PetscFunctionReturn(0);
815 }
816 
817 #undef __FUNCT__
818 #define __FUNCT__ "MatIsSymmetric_IS"
819 static PetscErrorCode MatIsSymmetric_IS(Mat A,PetscReal tol,PetscBool  *flg)
820 {
821   PetscErrorCode ierr;
822   Mat_IS         *matis = (Mat_IS*)A->data;
823   PetscBool      local_sym;
824 
825   PetscFunctionBegin;
826   ierr = MatIsSymmetric(matis->A,tol,&local_sym);CHKERRQ(ierr);
827   ierr = MPIU_Allreduce(&local_sym,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_IS"
/* Frees all data owned by the MATIS object (local matrix, scatters, work
   vectors, index sets, star forests with their root/leaf buffers) and
   removes the composed methods. */
static PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->cctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->rctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_ris);CHKERRQ(ierr);
  ierr = ISDestroy(&b->getsub_cis);CHKERRQ(ierr);
  if (b->sf != b->csf) { /* csf may alias sf; destroy it only when it is a distinct object */
    ierr = PetscSFDestroy(&b->csf);CHKERRQ(ierr);
    ierr = PetscFree2(b->csf_rootdata,b->csf_leafdata);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&b->sf);CHKERRQ(ierr);
  ierr = PetscFree2(b->sf_rootdata,b->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  /* reset the methods composed at creation time */
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
860 
861 #undef __FUNCT__
862 #define __FUNCT__ "MatMult_IS"
863 static PetscErrorCode MatMult_IS(Mat A,Vec x,Vec y)
864 {
865   PetscErrorCode ierr;
866   Mat_IS         *is  = (Mat_IS*)A->data;
867   PetscScalar    zero = 0.0;
868 
869   PetscFunctionBegin;
870   /*  scatter the global vector x into the local work vector */
871   ierr = VecScatterBegin(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
872   ierr = VecScatterEnd(is->cctx,x,is->x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
873 
874   /* multiply the local matrix */
875   ierr = MatMult(is->A,is->x,is->y);CHKERRQ(ierr);
876 
877   /* scatter product back into global memory */
878   ierr = VecSet(y,zero);CHKERRQ(ierr);
879   ierr = VecScatterBegin(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
880   ierr = VecScatterEnd(is->rctx,is->y,y,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
881   PetscFunctionReturn(0);
882 }
883 
884 #undef __FUNCT__
885 #define __FUNCT__ "MatMultAdd_IS"
886 static PetscErrorCode MatMultAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
887 {
888   Vec            temp_vec;
889   PetscErrorCode ierr;
890 
891   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
892   if (v3 != v2) {
893     ierr = MatMult(A,v1,v3);CHKERRQ(ierr);
894     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
895   } else {
896     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
897     ierr = MatMult(A,v1,temp_vec);CHKERRQ(ierr);
898     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
899     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
900     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "MatMultTranspose_IS"
907 static PetscErrorCode MatMultTranspose_IS(Mat A,Vec y,Vec x)
908 {
909   Mat_IS         *is = (Mat_IS*)A->data;
910   PetscErrorCode ierr;
911 
912   PetscFunctionBegin;
913   /*  scatter the global vector x into the local work vector */
914   ierr = VecScatterBegin(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
915   ierr = VecScatterEnd(is->rctx,y,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
916 
917   /* multiply the local matrix */
918   ierr = MatMultTranspose(is->A,is->y,is->x);CHKERRQ(ierr);
919 
920   /* scatter product back into global vector */
921   ierr = VecSet(x,0);CHKERRQ(ierr);
922   ierr = VecScatterBegin(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
923   ierr = VecScatterEnd(is->cctx,is->x,x,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "MatMultTransposeAdd_IS"
929 static PetscErrorCode MatMultTransposeAdd_IS(Mat A,Vec v1,Vec v2,Vec v3)
930 {
931   Vec            temp_vec;
932   PetscErrorCode ierr;
933 
934   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
935   if (v3 != v2) {
936     ierr = MatMultTranspose(A,v1,v3);CHKERRQ(ierr);
937     ierr = VecAXPY(v3,1.0,v2);CHKERRQ(ierr);
938   } else {
939     ierr = VecDuplicate(v2,&temp_vec);CHKERRQ(ierr);
940     ierr = MatMultTranspose(A,v1,temp_vec);CHKERRQ(ierr);
941     ierr = VecAXPY(temp_vec,1.0,v2);CHKERRQ(ierr);
942     ierr = VecCopy(temp_vec,v3);CHKERRQ(ierr);
943     ierr = VecDestroy(&temp_vec);CHKERRQ(ierr);
944   }
945   PetscFunctionReturn(0);
946 }
947 
948 #undef __FUNCT__
949 #define __FUNCT__ "MatView_IS"
950 static PetscErrorCode MatView_IS(Mat A,PetscViewer viewer)
951 {
952   Mat_IS         *a = (Mat_IS*)A->data;
953   PetscErrorCode ierr;
954   PetscViewer    sviewer;
955 
956   PetscFunctionBegin;
957   ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
958   ierr = MatView(a->A,sviewer);CHKERRQ(ierr);
959   ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr);
960   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
961   PetscFunctionReturn(0);
962 }
963 
#undef __FUNCT__
#define __FUNCT__ "MatSetLocalToGlobalMapping_IS"
/*
   Attaches the row/column local-to-global mappings to A. Any previously
   created data (local matrix, work vectors, scatters, SF) is destroyed;
   a new sequential local matrix is->A of size nr x nc (the sizes of the
   mappings) is created and, unless A is a local reference matrix
   (is->islocalref), the work vectors and the global-to-local scatters
   used by the multiplication routines are rebuilt.
*/
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       nr,rbs,nc,cbs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            cglobal,rglobal;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  PetscCheckSameComm(A,1,cmapping,3);
  /* Destroy any previous data */
  ierr = VecDestroy(&is->x);CHKERRQ(ierr);
  ierr = VecDestroy(&is->y);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->rctx);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&is->cctx);CHKERRQ(ierr);
  ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&is->sf);CHKERRQ(ierr);
  ierr = PetscFree2(is->sf_rootdata,is->sf_leafdata);CHKERRQ(ierr);

  /* Setup Layout and set local to global maps */
  ierr = PetscLayoutSetUp(A->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(A->cmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&nr);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&rbs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetSize(cmapping,&nc);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(cmapping,&cbs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  ierr = MatSetType(is->A,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetSizes(is->A,nr,nc,nr,nc);CHKERRQ(ierr);
  ierr = MatSetBlockSizes(is->A,rbs,cbs);CHKERRQ(ierr);
  /* the local matrix can be customized from the command line via the "is_" prefix */
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  if (!is->islocalref) { /* setup scatters and local vectors for MatMult */
    /* Create the local work vectors */
    ierr = MatCreateVecs(is->A,&is->x,&is->y);CHKERRQ(ierr);

    /* setup the global to local scatters */
    ierr = MatCreateVecs(A,&cglobal,&rglobal);CHKERRQ(ierr);
    /* "to" enumerates the local dofs, "from" their global numbers */
    ierr = ISCreateStride(PETSC_COMM_SELF,nr,0,1,&to);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
    ierr = VecScatterCreate(rglobal,from,is->y,to,&is->rctx);CHKERRQ(ierr);
    if (rmapping != cmapping) {
      ierr = ISDestroy(&to);CHKERRQ(ierr);
      ierr = ISDestroy(&from);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,nc,0,1,&to);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyIS(cmapping,to,&from);CHKERRQ(ierr);
      ierr = VecScatterCreate(cglobal,from,is->x,to,&is->cctx);CHKERRQ(ierr);
    } else { /* same mapping for rows and columns: share the scatter */
      ierr = PetscObjectReference((PetscObject)is->rctx);CHKERRQ(ierr);
      is->cctx = is->rctx;
    }
    ierr = VecDestroy(&rglobal);CHKERRQ(ierr);
    ierr = VecDestroy(&cglobal);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);
  }
  ierr = MatSetUp(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1032 
#undef __FUNCT__
#define __FUNCT__ "MatSetValues_IS"
/*
   Inserts values using global indices: they are converted to the local
   numbering before being passed to the local matrix. At most
   MATIS_MAX_ENTRIES_INSERTION row/column indices are accepted per call
   (stack buffers are used for the converted indices).
*/
static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  /* IS_GTOLM_MASK keeps the number of indices unchanged and maps indices
     not found in the local-to-global map to -1 (ignored by MatSetValues) */
  ierr = ISGlobalToLocalMappingApply(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before): a mismatch means
     some valid global index could not be mapped to a local one */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValues(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1063 
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesBlocked_IS"
/*
   Blocked analogue of MatSetValues_IS: global block indices are converted
   to local block indices before being passed to the local matrix. At most
   MATIS_MAX_ENTRIES_INSERTION block indices are accepted per call.
*/
static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols, const PetscScalar *values, InsertMode addv)
{
  Mat_IS         *is = (Mat_IS*)mat->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       i,zm,zn;
#endif
  PetscInt       rows_l[MATIS_MAX_ENTRIES_INSERTION],cols_l[MATIS_MAX_ENTRIES_INSERTION];

  PetscFunctionBegin;
#if defined(PETSC_USE_DEBUG)
  if (m > MATIS_MAX_ENTRIES_INSERTION || n > MATIS_MAX_ENTRIES_INSERTION) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of row/column block indices must be <= %D: they are %D %D",MATIS_MAX_ENTRIES_INSERTION,m,n);
  /* count negative indices */
  for (i=0,zm=0;i<m;i++) if (rows[i] < 0) zm++;
  for (i=0,zn=0;i<n;i++) if (cols[i] < 0) zn++;
#endif
  /* IS_GTOLM_MASK keeps the number of indices unchanged and maps indices
     not found in the local-to-global map to -1 (ignored on insertion) */
  ierr = ISGlobalToLocalMappingApplyBlock(mat->rmap->mapping,IS_GTOLM_MASK,m,rows,&m,rows_l);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(mat->cmap->mapping,IS_GTOLM_MASK,n,cols,&n,cols_l);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  /* count negative indices (should be the same as before): a mismatch means
     some valid global block index could not be mapped to a local one */
  for (i=0;i<m;i++) if (rows_l[i] < 0) zm--;
  for (i=0;i<n;i++) if (cols_l[i] < 0) zn--;
  if (zm) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the row indices can not be mapped! Maybe you should not use MATIS");
  if (zn) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Some of the column indices can not be mapped! Maybe you should not use MATIS");
#endif
  ierr = MatSetValuesBlocked(is->A,m,rows_l,n,cols_l,values,addv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1094 
1095 #undef __FUNCT__
1096 #define __FUNCT__ "MatSetValuesLocal_IS"
1097 static PetscErrorCode MatSetValuesLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1098 {
1099   PetscErrorCode ierr;
1100   Mat_IS         *is = (Mat_IS*)A->data;
1101 
1102   PetscFunctionBegin;
1103   ierr = MatSetValues(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1104   PetscFunctionReturn(0);
1105 }
1106 
1107 #undef __FUNCT__
1108 #define __FUNCT__ "MatSetValuesBlockedLocal_IS"
1109 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A,PetscInt m,const PetscInt *rows, PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode addv)
1110 {
1111   PetscErrorCode ierr;
1112   Mat_IS         *is = (Mat_IS*)A->data;
1113 
1114   PetscFunctionBegin;
1115   ierr = MatSetValuesBlocked(is->A,m,rows,n,cols,values,addv);CHKERRQ(ierr);
1116   PetscFunctionReturn(0);
1117 }
1118 
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_IS"
/*
   Zeros the listed global rows of the MATIS matrix (and optionally fixes
   the right hand side b using the solution values in x). The owned rows
   are first mapped to the local numbering, then broadcast through the
   star forest so that every subdomain sharing a zeroed dof zeros it in
   its local matrix as well.
*/
static PetscErrorCode MatZeroRows_IS(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  Mat_IS         *matis = (Mat_IS*)A->data;
  PetscInt       nr,nl,len,i;
  PetscInt       *lrows;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get locally owned rows */
  ierr = MatZeroRowsMapLocal_Private(A,n,rows,&len,&lrows);CHKERRQ(ierr);
  /* fix right hand side if needed: b[i] = diag*x[i] on the zeroed rows */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    ierr = VecGetArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecGetArray(b, &bb);CHKERRQ(ierr);
    for (i=0;i<len;++i) bb[lrows[i]] = diag*xx[lrows[i]];
    ierr = VecRestoreArrayRead(x, &xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(b, &bb);CHKERRQ(ierr);
  }
  /* get rows associated to the local matrices */
  if (!matis->sf) { /* setup SF if not yet created and allocate rootdata and leafdata */
    ierr = MatISComputeSF_Private(A);CHKERRQ(ierr);
  }
  ierr = MatGetSize(matis->A,&nl,NULL);CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_leafdata,nl*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(matis->sf_rootdata,A->rmap->n*sizeof(PetscInt));CHKERRQ(ierr);
  /* mark the owned rows to be zeroed ... */
  for (i=0;i<len;i++) matis->sf_rootdata[lrows[i]] = 1;
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  /* ... and broadcast the marks to the local (leaf) numbering of each subdomain */
  ierr = PetscSFBcastBegin(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(matis->sf,MPIU_INT,matis->sf_rootdata,matis->sf_leafdata);CHKERRQ(ierr);
  ierr = PetscMalloc1(nl,&lrows);CHKERRQ(ierr);
  /* collect the marked local rows and zero them in the local matrix */
  for (i=0,nr=0;i<nl;i++) if (matis->sf_leafdata[i]) lrows[nr++] = i;
  ierr = MatISZeroRowsLocal_Private(A,nr,lrows,diag);CHKERRQ(ierr);
  ierr = PetscFree(lrows);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1159 
1160 #undef __FUNCT__
1161 #define __FUNCT__ "MatISZeroRowsLocal_Private"
1162 static PetscErrorCode MatISZeroRowsLocal_Private(Mat A,PetscInt n,const PetscInt rows[],PetscScalar diag)
1163 {
1164   Mat_IS         *is = (Mat_IS*)A->data;
1165   PetscErrorCode ierr;
1166 
1167   PetscFunctionBegin;
1168   if (diag != 0.) {
1169     /*
1170        Set up is->x as a "counting vector". This is in order to MatMult_IS
1171        work properly in the interface nodes.
1172     */
1173     Vec counter;
1174     ierr = MatCreateVecs(A,NULL,&counter);CHKERRQ(ierr);
1175     ierr = VecSet(counter,0.);CHKERRQ(ierr);
1176     ierr = VecSet(is->y,1.);CHKERRQ(ierr);
1177     ierr = VecScatterBegin(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1178     ierr = VecScatterEnd(is->rctx,is->y,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1179     ierr = VecScatterBegin(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1180     ierr = VecScatterEnd(is->rctx,counter,is->y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
1181     ierr = VecDestroy(&counter);CHKERRQ(ierr);
1182   }
1183 
1184   if (!n) {
1185     is->pure_neumann = PETSC_TRUE;
1186   } else {
1187     PetscInt i;
1188     is->pure_neumann = PETSC_FALSE;
1189 
1190     ierr = MatZeroRows(is->A,n,rows,diag,0,0);CHKERRQ(ierr);
1191     if (diag != 0.) {
1192       const PetscScalar *array;
1193       ierr = VecGetArrayRead(is->y,&array);CHKERRQ(ierr);
1194       for (i=0; i<n; i++) {
1195         ierr = MatSetValue(is->A,rows[i],rows[i],diag/(array[rows[i]]),INSERT_VALUES);CHKERRQ(ierr);
1196       }
1197       ierr = VecRestoreArrayRead(is->y,&array);CHKERRQ(ierr);
1198     }
1199     ierr = MatAssemblyBegin(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1200     ierr = MatAssemblyEnd(is->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1201   }
1202   PetscFunctionReturn(0);
1203 }
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "MatAssemblyBegin_IS"
1207 static PetscErrorCode MatAssemblyBegin_IS(Mat A,MatAssemblyType type)
1208 {
1209   Mat_IS         *is = (Mat_IS*)A->data;
1210   PetscErrorCode ierr;
1211 
1212   PetscFunctionBegin;
1213   ierr = MatAssemblyBegin(is->A,type);CHKERRQ(ierr);
1214   PetscFunctionReturn(0);
1215 }
1216 
1217 #undef __FUNCT__
1218 #define __FUNCT__ "MatAssemblyEnd_IS"
1219 static PetscErrorCode MatAssemblyEnd_IS(Mat A,MatAssemblyType type)
1220 {
1221   Mat_IS         *is = (Mat_IS*)A->data;
1222   PetscErrorCode ierr;
1223 
1224   PetscFunctionBegin;
1225   ierr = MatAssemblyEnd(is->A,type);CHKERRQ(ierr);
1226   PetscFunctionReturn(0);
1227 }
1228 
1229 #undef __FUNCT__
1230 #define __FUNCT__ "MatISGetLocalMat_IS"
1231 static PetscErrorCode MatISGetLocalMat_IS(Mat mat,Mat *local)
1232 {
1233   Mat_IS *is = (Mat_IS*)mat->data;
1234 
1235   PetscFunctionBegin;
1236   *local = is->A;
1237   PetscFunctionReturn(0);
1238 }
1239 
#undef __FUNCT__
#define __FUNCT__ "MatISGetLocalMat"
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a MATIS matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the MatSetValues() operation.
    No reference is taken on the returned matrix; do not destroy it.

.seealso: MATIS
@*/
PetscErrorCode MatISGetLocalMat(Mat mat,Mat *local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(local,2);
  /* dispatch to the implementation composed with the object (MatISGetLocalMat_IS for MATIS) */
  ierr = PetscUseMethod(mat,"MatISGetLocalMat_C",(Mat,Mat*),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1270 
1271 #undef __FUNCT__
1272 #define __FUNCT__ "MatISSetLocalMat_IS"
1273 static PetscErrorCode MatISSetLocalMat_IS(Mat mat,Mat local)
1274 {
1275   Mat_IS         *is = (Mat_IS*)mat->data;
1276   PetscInt       nrows,ncols,orows,ocols;
1277   PetscErrorCode ierr;
1278 
1279   PetscFunctionBegin;
1280   if (is->A) {
1281     ierr = MatGetSize(is->A,&orows,&ocols);CHKERRQ(ierr);
1282     ierr = MatGetSize(local,&nrows,&ncols);CHKERRQ(ierr);
1283     if (orows != nrows || ocols != ncols) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local MATIS matrix should be of size %dx%d (you passed a %dx%d matrix)\n",orows,ocols,nrows,ncols);
1284   }
1285   ierr  = PetscObjectReference((PetscObject)local);CHKERRQ(ierr);
1286   ierr  = MatDestroy(&is->A);CHKERRQ(ierr);
1287   is->A = local;
1288   PetscFunctionReturn(0);
1289 }
1290 
#undef __FUNCT__
#define __FUNCT__ "MatISSetLocalMat"
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a MATIS object.

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object MATIS.
    A reference is taken on local, so the caller retains (and should
  eventually destroy) its own reference.

.seealso: MATIS
@*/
PetscErrorCode MatISSetLocalMat(Mat mat,Mat local)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(local,MAT_CLASSID,2);
  /* dispatch to the implementation composed with the object (MatISSetLocalMat_IS for MATIS) */
  ierr = PetscUseMethod(mat,"MatISSetLocalMat_C",(Mat,Mat),(mat,local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1320 
1321 #undef __FUNCT__
1322 #define __FUNCT__ "MatZeroEntries_IS"
1323 static PetscErrorCode MatZeroEntries_IS(Mat A)
1324 {
1325   Mat_IS         *a = (Mat_IS*)A->data;
1326   PetscErrorCode ierr;
1327 
1328   PetscFunctionBegin;
1329   ierr = MatZeroEntries(a->A);CHKERRQ(ierr);
1330   PetscFunctionReturn(0);
1331 }
1332 
1333 #undef __FUNCT__
1334 #define __FUNCT__ "MatScale_IS"
1335 static PetscErrorCode MatScale_IS(Mat A,PetscScalar a)
1336 {
1337   Mat_IS         *is = (Mat_IS*)A->data;
1338   PetscErrorCode ierr;
1339 
1340   PetscFunctionBegin;
1341   ierr = MatScale(is->A,a);CHKERRQ(ierr);
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 #undef __FUNCT__
1346 #define __FUNCT__ "MatGetDiagonal_IS"
1347 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
1348 {
1349   Mat_IS         *is = (Mat_IS*)A->data;
1350   PetscErrorCode ierr;
1351 
1352   PetscFunctionBegin;
1353   /* get diagonal of the local matrix */
1354   ierr = MatGetDiagonal(is->A,is->y);CHKERRQ(ierr);
1355 
1356   /* scatter diagonal back into global vector */
1357   ierr = VecSet(v,0);CHKERRQ(ierr);
1358   ierr = VecScatterBegin(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1359   ierr = VecScatterEnd(is->rctx,is->y,v,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
1360   PetscFunctionReturn(0);
1361 }
1362 
1363 #undef __FUNCT__
1364 #define __FUNCT__ "MatSetOption_IS"
1365 static PetscErrorCode MatSetOption_IS(Mat A,MatOption op,PetscBool flg)
1366 {
1367   Mat_IS         *a = (Mat_IS*)A->data;
1368   PetscErrorCode ierr;
1369 
1370   PetscFunctionBegin;
1371   ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 #undef __FUNCT__
1376 #define __FUNCT__ "MatAXPY_IS"
1377 static PetscErrorCode MatAXPY_IS(Mat Y,PetscScalar a,Mat X,MatStructure str)
1378 {
1379   Mat_IS         *y = (Mat_IS*)Y->data;
1380   Mat_IS         *x;
1381 #if defined(PETSC_USE_DEBUG)
1382   PetscBool      ismatis;
1383 #endif
1384   PetscErrorCode ierr;
1385 
1386   PetscFunctionBegin;
1387 #if defined(PETSC_USE_DEBUG)
1388   ierr = PetscObjectTypeCompare((PetscObject)X,MATIS,&ismatis);CHKERRQ(ierr);
1389   if (!ismatis) SETERRQ(PetscObjectComm((PetscObject)Y),PETSC_ERR_SUP,"Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
1390 #endif
1391   x = (Mat_IS*)X->data;
1392   ierr = MatAXPY(y->A,a,x->A,str);CHKERRQ(ierr);
1393   PetscFunctionReturn(0);
1394 }
1395 
1396 #undef __FUNCT__
1397 #define __FUNCT__ "MatGetLocalSubMatrix_IS"
1398 static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A,IS row,IS col,Mat *submat)
1399 {
1400   Mat                    lA;
1401   Mat_IS                 *matis;
1402   ISLocalToGlobalMapping rl2g,cl2g;
1403   IS                     is;
1404   const PetscInt         *rg,*rl;
1405   PetscInt               nrg;
1406   PetscInt               N,M,nrl,i,*idxs;
1407   PetscErrorCode         ierr;
1408 
1409   PetscFunctionBegin;
1410   ierr = ISLocalToGlobalMappingGetIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
1411   ierr = ISGetLocalSize(row,&nrl);CHKERRQ(ierr);
1412   ierr = ISGetIndices(row,&rl);CHKERRQ(ierr);
1413   ierr = ISLocalToGlobalMappingGetSize(A->rmap->mapping,&nrg);CHKERRQ(ierr);
1414 #if defined(PETSC_USE_DEBUG)
1415   for (i=0;i<nrl;i++) if (rl[i]>=nrg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local row index %d -> %d greater than maximum possible %d\n",i,rl[i],nrg);
1416 #endif
1417   ierr = PetscMalloc1(nrg,&idxs);CHKERRQ(ierr);
1418   /* map from [0,nrl) to row */
1419   for (i=0;i<nrl;i++) idxs[i] = rl[i];
1420 #if defined(PETSC_USE_DEBUG)
1421   for (i=nrl;i<nrg;i++) idxs[i] = nrg;
1422 #else
1423   for (i=nrl;i<nrg;i++) idxs[i] = -1;
1424 #endif
1425   ierr = ISRestoreIndices(row,&rl);CHKERRQ(ierr);
1426   ierr = ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping,&rg);CHKERRQ(ierr);
1427   ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),nrg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
1428   ierr = ISLocalToGlobalMappingCreateIS(is,&rl2g);CHKERRQ(ierr);
1429   ierr = ISDestroy(&is);CHKERRQ(ierr);
1430   /* compute new l2g map for columns */
1431   if (col != row || A->rmap->mapping != A->cmap->mapping) {
1432     const PetscInt *cg,*cl;
1433     PetscInt       ncg;
1434     PetscInt       ncl;
1435 
1436     ierr = ISLocalToGlobalMappingGetIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
1437     ierr = ISGetLocalSize(col,&ncl);CHKERRQ(ierr);
1438     ierr = ISGetIndices(col,&cl);CHKERRQ(ierr);
1439     ierr = ISLocalToGlobalMappingGetSize(A->cmap->mapping,&ncg);CHKERRQ(ierr);
1440 #if defined(PETSC_USE_DEBUG)
1441     for (i=0;i<ncl;i++) if (cl[i]>=ncg) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Local column index %d -> %d greater than maximum possible %d\n",i,cl[i],ncg);
1442 #endif
1443     ierr = PetscMalloc1(ncg,&idxs);CHKERRQ(ierr);
1444     /* map from [0,ncl) to col */
1445     for (i=0;i<ncl;i++) idxs[i] = cl[i];
1446 #if defined(PETSC_USE_DEBUG)
1447     for (i=ncl;i<ncg;i++) idxs[i] = ncg;
1448 #else
1449     for (i=ncl;i<ncg;i++) idxs[i] = -1;
1450 #endif
1451     ierr = ISRestoreIndices(col,&cl);CHKERRQ(ierr);
1452     ierr = ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping,&cg);CHKERRQ(ierr);
1453     ierr = ISCreateGeneral(PetscObjectComm((PetscObject)A),ncg,idxs,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
1454     ierr = ISLocalToGlobalMappingCreateIS(is,&cl2g);CHKERRQ(ierr);
1455     ierr = ISDestroy(&is);CHKERRQ(ierr);
1456   } else {
1457     ierr = PetscObjectReference((PetscObject)rl2g);CHKERRQ(ierr);
1458     cl2g = rl2g;
1459   }
1460   /* create the MATIS submatrix */
1461   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
1462   ierr = MatCreate(PetscObjectComm((PetscObject)A),submat);CHKERRQ(ierr);
1463   ierr = MatSetSizes(*submat,PETSC_DECIDE,PETSC_DECIDE,M,N);CHKERRQ(ierr);
1464   ierr = MatSetType(*submat,MATIS);CHKERRQ(ierr);
1465   matis = (Mat_IS*)((*submat)->data);
1466   matis->islocalref = PETSC_TRUE;
1467   ierr = MatSetLocalToGlobalMapping(*submat,rl2g,cl2g);CHKERRQ(ierr);
1468   ierr = MatISGetLocalMat(A,&lA);CHKERRQ(ierr);
1469   ierr = MatISSetLocalMat(*submat,lA);CHKERRQ(ierr);
1470   ierr = MatSetUp(*submat);CHKERRQ(ierr);
1471   ierr = MatAssemblyBegin(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1472   ierr = MatAssemblyEnd(*submat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1473   ierr = ISLocalToGlobalMappingDestroy(&rl2g);CHKERRQ(ierr);
1474   ierr = ISLocalToGlobalMappingDestroy(&cl2g);CHKERRQ(ierr);
1475   /* remove unsupported ops */
1476   ierr = PetscMemzero((*submat)->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1477   (*submat)->ops->destroy               = MatDestroy_IS;
1478   (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
1479   (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
1480   (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
1481   (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
1482   PetscFunctionReturn(0);
1483 }
1484 
1485 #undef __FUNCT__
1486 #define __FUNCT__ "MatCreateIS"
1487 /*@
1488     MatCreateIS - Creates a "process" unassembled matrix, assembled on each
1489        process but not across processes.
1490 
1491    Input Parameters:
1492 +     comm    - MPI communicator that will share the matrix
1493 .     bs      - block size of the matrix
1494 .     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
1495 .     rmap    - local to global map for rows
1496 -     cmap    - local to global map for cols
1497 
1498    Output Parameter:
1499 .    A - the resulting matrix
1500 
1501    Level: advanced
1502 
1503    Notes: See MATIS for more details.
1504           m and n are NOT related to the size of the map; they are the size of the part of the vector owned
1505           by that process. The sizes of rmap and cmap define the size of the local matrices.
1506           If either rmap or cmap are NULL, then the matrix is assumed to be square.
1507 
1508 .seealso: MATIS, MatSetLocalToGlobalMapping()
1509 @*/
1510 PetscErrorCode  MatCreateIS(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,ISLocalToGlobalMapping rmap,ISLocalToGlobalMapping cmap,Mat *A)
1511 {
1512   PetscErrorCode ierr;
1513 
1514   PetscFunctionBegin;
1515   if (!rmap && !cmap) SETERRQ(comm,PETSC_ERR_USER,"You need to provide at least one of the mapping");
1516   ierr = MatCreate(comm,A);CHKERRQ(ierr);
1517   ierr = MatSetBlockSize(*A,bs);CHKERRQ(ierr);
1518   ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
1519   ierr = MatSetType(*A,MATIS);CHKERRQ(ierr);
1520   if (rmap && cmap) {
1521     ierr = MatSetLocalToGlobalMapping(*A,rmap,cmap);CHKERRQ(ierr);
1522   } else if (!rmap) {
1523     ierr = MatSetLocalToGlobalMapping(*A,cmap,cmap);CHKERRQ(ierr);
1524   } else {
1525     ierr = MatSetLocalToGlobalMapping(*A,rmap,rmap);CHKERRQ(ierr);
1526   }
1527   PetscFunctionReturn(0);
1528 }
1529 
1530 /*MC
1531    MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. PCBDDC or KSPFETIDP).
1532    This stores the matrices in globally unassembled form. Each processor
1533    assembles only its local Neumann problem and the parallel matrix vector
1534    product is handled "implicitly".
1535 
1536    Operations Provided:
1537 +  MatMult()
1538 .  MatMultAdd()
1539 .  MatMultTranspose()
1540 .  MatMultTransposeAdd()
1541 .  MatZeroEntries()
1542 .  MatSetOption()
1543 .  MatZeroRows()
1544 .  MatSetValues()
1545 .  MatSetValuesBlocked()
1546 .  MatSetValuesLocal()
1547 .  MatSetValuesBlockedLocal()
1548 .  MatScale()
1549 .  MatGetDiagonal()
1550 .  MatMissingDiagonal()
1551 .  MatDuplicate()
1552 .  MatCopy()
1553 .  MatAXPY()
1554 .  MatGetSubMatrix()
1555 .  MatGetLocalSubMatrix()
1556 -  MatSetLocalToGlobalMapping()
1557 
1558    Options Database Keys:
1559 . -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
1560 
   Notes: Options prefix for the inner matrix is given by -is_mat_xxx
1562 
1563           You must call MatSetLocalToGlobalMapping() before using this matrix type.
1564 
1565           You can do matrix preallocation on the local matrix after you obtain it with
1566           MatISGetLocalMat(); otherwise, you could use MatISSetPreallocation()
1567 
1568   Level: advanced
1569 
1570 .seealso: Mat, MatISGetLocalMat(), MatSetLocalToGlobalMapping(), MatISSetPreallocation(), MatCreateIS(), PCBDDC, KSPFETIDP
1571 
1572 M*/
1573 
1574 #undef __FUNCT__
1575 #define __FUNCT__ "MatCreate_IS"
1576 PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
1577 {
1578   PetscErrorCode ierr;
1579   Mat_IS         *b;
1580 
1581   PetscFunctionBegin;
1582   ierr    = PetscNewLog(A,&b);CHKERRQ(ierr);
1583   A->data = (void*)b;
1584 
1585   /* matrix ops */
1586   ierr    = PetscMemzero(A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
1587   A->ops->mult                    = MatMult_IS;
1588   A->ops->multadd                 = MatMultAdd_IS;
1589   A->ops->multtranspose           = MatMultTranspose_IS;
1590   A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
1591   A->ops->destroy                 = MatDestroy_IS;
1592   A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
1593   A->ops->setvalues               = MatSetValues_IS;
1594   A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
1595   A->ops->setvalueslocal          = MatSetValuesLocal_IS;
1596   A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
1597   A->ops->zerorows                = MatZeroRows_IS;
1598   A->ops->assemblybegin           = MatAssemblyBegin_IS;
1599   A->ops->assemblyend             = MatAssemblyEnd_IS;
1600   A->ops->view                    = MatView_IS;
1601   A->ops->zeroentries             = MatZeroEntries_IS;
1602   A->ops->scale                   = MatScale_IS;
1603   A->ops->getdiagonal             = MatGetDiagonal_IS;
1604   A->ops->setoption               = MatSetOption_IS;
1605   A->ops->ishermitian             = MatIsHermitian_IS;
1606   A->ops->issymmetric             = MatIsSymmetric_IS;
1607   A->ops->duplicate               = MatDuplicate_IS;
1608   A->ops->missingdiagonal         = MatMissingDiagonal_IS;
1609   A->ops->copy                    = MatCopy_IS;
1610   A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
1611   A->ops->getsubmatrix            = MatGetSubMatrix_IS;
1612   A->ops->axpy                    = MatAXPY_IS;
1613 
1614   /* special MATIS functions */
1615   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",MatISGetLocalMat_IS);CHKERRQ(ierr);
1616   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetLocalMat_C",MatISSetLocalMat_IS);CHKERRQ(ierr);
1617   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetMPIXAIJ_C",MatISGetMPIXAIJ_IS);CHKERRQ(ierr);
1618   ierr = PetscObjectComposeFunction((PetscObject)A,"MatISSetPreallocation_C",MatISSetPreallocation_IS);CHKERRQ(ierr);
1619   ierr = PetscObjectChangeTypeName((PetscObject)A,MATIS);CHKERRQ(ierr);
1620   PetscFunctionReturn(0);
1621 }
1622