xref: /petsc/src/mat/impls/is/matis.c (revision 21e3ffae2f3b73c0bd738cf6d0a809700fc04bb0)
1 /*
2     Creates a matrix class for using the Neumann-Neumann type preconditioners.
3     This stores the matrices in globally unassembled form. Each processor
4     assembles only its local Neumann problem and the parallel matrix vector
5     product is handled "implicitly".
6 
7     Currently this allows for only one subdomain per processor.
8 */
9 
10 #include <petsc/private/matisimpl.h> /*I "petscmat.h" I*/
11 #include <petsc/private/sfimpl.h>
12 #include <petsc/private/vecimpl.h>
13 #include <petsc/private/hashseti.h>
14 
15 #define MATIS_MAX_ENTRIES_INSERTION 2048
16 static PetscErrorCode MatSetValuesLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
17 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat, PetscInt, const PetscInt *, PetscInt, const PetscInt *, const PetscScalar *, InsertMode);
18 static PetscErrorCode MatISSetUpScatters_Private(Mat);
19 
/*
   Destroy callback for the PetscContainer composed with C under "_MatIS_PtAP":
   releases the local restricted prolongators and the row/column index sets
   created by MatPtAPSymbolic_IS_XAIJ.
*/
static PetscErrorCode MatISContainerDestroyPtAP_Private(void *ptr)
{
  MatISPtAP ptap = (MatISPtAP)ptr;

  PetscFunctionBegin;
  /* lP holds one submatrix, or two when an unsymmetric row map required a second restriction (ris1 non-NULL) */
  PetscCall(MatDestroySubMatrices(ptap->ris1 ? 2 : 1, &ptap->lP));
  PetscCall(ISDestroy(&ptap->cis0));
  PetscCall(ISDestroy(&ptap->cis1));
  PetscCall(ISDestroy(&ptap->ris0));
  PetscCall(ISDestroy(&ptap->ris1));
  PetscCall(PetscFree(ptap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
33 
/*
   Numeric phase of C = P^T A P with A of type MATIS: restrict P to the local
   subdomain index sets recorded by the symbolic phase, perform the triple
   product on the local matrices, and install the result as C's local matrix.
*/
static PetscErrorCode MatPtAPNumeric_IS_XAIJ(Mat A, Mat P, Mat C)
{
  MatISPtAP      ptap;
  Mat_IS        *matis = (Mat_IS *)A->data;
  Mat            lA, lC;
  MatReuse       reuse;
  IS             ris[2], cis[2];
  PetscContainer c;
  PetscInt       n;

  PetscFunctionBegin;
  /* retrieve the data attached to C by MatPtAPSymbolic_IS_XAIJ */
  PetscCall(PetscObjectQuery((PetscObject)C, "_MatIS_PtAP", (PetscObject *)&c));
  PetscCheck(c, PetscObjectComm((PetscObject)C), PETSC_ERR_PLIB, "Missing PtAP information");
  PetscCall(PetscContainerGetPointer(c, (void **)&ptap));
  ris[0] = ptap->ris0;
  ris[1] = ptap->ris1;
  cis[0] = ptap->cis0;
  cis[1] = ptap->cis1;
  n      = ptap->ris1 ? 2 : 1;                           /* second restriction only for unsymmetric row/col maps of A */
  reuse  = ptap->lP ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX; /* lP is NULL on the first numeric call */
  PetscCall(MatCreateSubMatrices(P, n, ris, cis, reuse, &ptap->lP));

  PetscCall(MatISGetLocalMat(A, &lA));
  PetscCall(MatISGetLocalMat(C, &lC));
  if (ptap->ris1) { /* unsymmetric A mapping */
    Mat lPt;

    /* lC = lP[1]^T * lA * lP[0] */
    PetscCall(MatTranspose(ptap->lP[1], MAT_INITIAL_MATRIX, &lPt));
    PetscCall(MatMatMatMult(lPt, lA, ptap->lP[0], reuse, ptap->fill, &lC));
    if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)(A), "_MatIS_PtAP_l2l", (PetscObject)lPt));
    PetscCall(MatDestroy(&lPt));
  } else {
    /* symmetric case: plain local PtAP */
    PetscCall(MatPtAP(lA, ptap->lP[0], reuse, ptap->fill, &lC));
    if (matis->storel2l) PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP_l2l", (PetscObject)ptap->lP[0]));
  }
  if (reuse == MAT_INITIAL_MATRIX) { /* first call: hand the freshly created local matrix over to C */
    PetscCall(MatISSetLocalMat(C, lC));
    PetscCall(MatDestroy(&lC));
  }
  PetscCall(MatAssemblyBegin(C, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(C, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
77 
/*
   Given the restricted prolongator PT (SEQAIJ/SEQBAIJ/MPIAIJ/MPIBAIJ), collect
   in *cis the global block indices of its nonzero columns (up to PETSC_SMALL).
*/
static PetscErrorCode MatGetNonzeroColumnsLocal_Private(Mat PT, IS *cis)
{
  Mat             Po, Pd;
  IS              zd, zo;
  const PetscInt *garray;
  PetscInt       *aux, i, bs;
  PetscInt        dc, stc, oc, ctd, cto;
  PetscBool       ismpiaij, ismpibaij, isseqaij, isseqbaij;
  MPI_Comm        comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(PT, MAT_CLASSID, 1);
  PetscValidPointer(cis, 2);
  PetscCall(PetscObjectGetComm((PetscObject)PT, &comm));
  bs = 1;
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATMPIBAIJ, &ismpibaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)PT, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectTypeCompare((PetscObject)PT, MATSEQBAIJ, &isseqbaij));
  if (isseqaij || isseqbaij) {
    /* sequential matrix: single "diagonal" block, no off-process part */
    Pd     = PT;
    Po     = NULL;
    garray = NULL;
  } else if (ismpiaij) {
    PetscCall(MatMPIAIJGetSeqAIJ(PT, &Pd, &Po, &garray));
  } else if (ismpibaij) {
    PetscCall(MatMPIBAIJGetSeqBAIJ(PT, &Pd, &Po, &garray));
    PetscCall(MatGetBlockSize(PT, &bs));
  } else SETERRQ(comm, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)(PT))->type_name);

  /* identify any null columns in Pd or Po */
  /* We use a tolerance comparison since it may happen that, with geometric multigrid,
     some of the columns are not really zero, but very close to */
  zo = zd = NULL;
  if (Po) PetscCall(MatFindNonzeroRowsOrCols_Basic(Po, PETSC_TRUE, PETSC_SMALL, &zo));
  PetscCall(MatFindNonzeroRowsOrCols_Basic(Pd, PETSC_TRUE, PETSC_SMALL, &zd));

  PetscCall(MatGetLocalSize(PT, NULL, &dc));
  PetscCall(MatGetOwnershipRangeColumn(PT, &stc, NULL));
  if (Po) PetscCall(MatGetLocalSize(Po, NULL, &oc));
  else oc = 0;
  /* aux collects global block column indices: diagonal-block columns first, then off-process columns */
  PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
  if (zd) {
    const PetscInt *idxs;
    PetscInt        nz;

    /* this will throw an error if bs is not valid */
    PetscCall(ISSetBlockSize(zd, bs));
    PetscCall(ISGetLocalSize(zd, &nz));
    PetscCall(ISGetIndices(zd, &idxs));
    ctd = nz / bs;
    /* shift local diagonal-block indices by the first owned column, then convert to block indices */
    for (i = 0; i < ctd; i++) aux[i] = (idxs[bs * i] + stc) / bs;
    PetscCall(ISRestoreIndices(zd, &idxs));
  } else {
    /* no IS returned: keep all diagonal-block columns */
    ctd = dc / bs;
    for (i = 0; i < ctd; i++) aux[i] = i + stc / bs;
  }
  if (zo) {
    const PetscInt *idxs;
    PetscInt        nz;

    /* this will throw an error if bs is not valid */
    PetscCall(ISSetBlockSize(zo, bs));
    PetscCall(ISGetLocalSize(zo, &nz));
    PetscCall(ISGetIndices(zo, &idxs));
    cto = nz / bs;
    /* map off-diagonal local columns to global block indices through garray */
    for (i = 0; i < cto; i++) aux[i + ctd] = garray[idxs[bs * i] / bs];
    PetscCall(ISRestoreIndices(zo, &idxs));
  } else {
    /* keep all off-process columns; loop is empty for the sequential case (oc == 0, garray NULL) */
    cto = oc / bs;
    for (i = 0; i < cto; i++) aux[i + ctd] = garray[i];
  }
  PetscCall(ISCreateBlock(comm, bs, ctd + cto, aux, PETSC_OWN_POINTER, cis));
  PetscCall(ISDestroy(&zd));
  PetscCall(ISDestroy(&zo));
  PetscFunctionReturn(PETSC_SUCCESS);
}
155 
156 static PetscErrorCode MatPtAPSymbolic_IS_XAIJ(Mat A, Mat P, PetscReal fill, Mat C)
157 {
158   Mat                    PT, lA;
159   MatISPtAP              ptap;
160   ISLocalToGlobalMapping Crl2g, Ccl2g, rl2g, cl2g;
161   PetscContainer         c;
162   MatType                lmtype;
163   const PetscInt        *garray;
164   PetscInt               ibs, N, dc;
165   MPI_Comm               comm;
166 
167   PetscFunctionBegin;
168   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
169   PetscCall(MatSetType(C, MATIS));
170   PetscCall(MatISGetLocalMat(A, &lA));
171   PetscCall(MatGetType(lA, &lmtype));
172   PetscCall(MatISSetLocalMatType(C, lmtype));
173   PetscCall(MatGetSize(P, NULL, &N));
174   PetscCall(MatGetLocalSize(P, NULL, &dc));
175   PetscCall(MatSetSizes(C, dc, dc, N, N));
176   /* Not sure about this
177   PetscCall(MatGetBlockSizes(P,NULL,&ibs));
178   PetscCall(MatSetBlockSize(*C,ibs));
179 */
180 
181   PetscCall(PetscNew(&ptap));
182   PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
183   PetscCall(PetscContainerSetPointer(c, ptap));
184   PetscCall(PetscContainerSetUserDestroy(c, MatISContainerDestroyPtAP_Private));
185   PetscCall(PetscObjectCompose((PetscObject)C, "_MatIS_PtAP", (PetscObject)c));
186   PetscCall(PetscContainerDestroy(&c));
187   ptap->fill = fill;
188 
189   PetscCall(MatISGetLocalToGlobalMapping(A, &rl2g, &cl2g));
190 
191   PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &ibs));
192   PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &N));
193   PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &garray));
194   PetscCall(ISCreateBlock(comm, ibs, N / ibs, garray, PETSC_COPY_VALUES, &ptap->ris0));
195   PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &garray));
196 
197   PetscCall(MatCreateSubMatrix(P, ptap->ris0, NULL, MAT_INITIAL_MATRIX, &PT));
198   PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis0));
199   PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis0, &Ccl2g));
200   PetscCall(MatDestroy(&PT));
201 
202   Crl2g = NULL;
203   if (rl2g != cl2g) { /* unsymmetric A mapping */
204     PetscBool same, lsame = PETSC_FALSE;
205     PetscInt  N1, ibs1;
206 
207     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &N1));
208     PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &ibs1));
209     PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &garray));
210     PetscCall(ISCreateBlock(comm, ibs, N / ibs, garray, PETSC_COPY_VALUES, &ptap->ris1));
211     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &garray));
212     if (ibs1 == ibs && N1 == N) { /* check if the l2gmaps are the same */
213       const PetscInt *i1, *i2;
214 
215       PetscCall(ISBlockGetIndices(ptap->ris0, &i1));
216       PetscCall(ISBlockGetIndices(ptap->ris1, &i2));
217       PetscCall(PetscArraycmp(i1, i2, N, &lsame));
218     }
219     PetscCall(MPIU_Allreduce(&lsame, &same, 1, MPIU_BOOL, MPI_LAND, comm));
220     if (same) {
221       PetscCall(ISDestroy(&ptap->ris1));
222     } else {
223       PetscCall(MatCreateSubMatrix(P, ptap->ris1, NULL, MAT_INITIAL_MATRIX, &PT));
224       PetscCall(MatGetNonzeroColumnsLocal_Private(PT, &ptap->cis1));
225       PetscCall(ISLocalToGlobalMappingCreateIS(ptap->cis1, &Crl2g));
226       PetscCall(MatDestroy(&PT));
227     }
228   }
229   /* Not sure about this
230   if (!Crl2g) {
231     PetscCall(MatGetBlockSize(C,&ibs));
232     PetscCall(ISLocalToGlobalMappingSetBlockSize(Ccl2g,ibs));
233   }
234 */
235   PetscCall(MatSetLocalToGlobalMapping(C, Crl2g ? Crl2g : Ccl2g, Ccl2g));
236   PetscCall(ISLocalToGlobalMappingDestroy(&Crl2g));
237   PetscCall(ISLocalToGlobalMappingDestroy(&Ccl2g));
238 
239   C->ops->ptapnumeric = MatPtAPNumeric_IS_XAIJ;
240   PetscFunctionReturn(PETSC_SUCCESS);
241 }
242 
243 /* ----------------------------------------- */
244 static PetscErrorCode MatProductSymbolic_PtAP_IS_XAIJ(Mat C)
245 {
246   Mat_Product *product = C->product;
247   Mat          A = product->A, P = product->B;
248   PetscReal    fill = product->fill;
249 
250   PetscFunctionBegin;
251   PetscCall(MatPtAPSymbolic_IS_XAIJ(A, P, fill, C));
252   C->ops->productnumeric = MatProductNumeric_PtAP;
253   PetscFunctionReturn(PETSC_SUCCESS);
254 }
255 
/* Install the PtAP symbolic hook for products C = P^T A P with A of type MATIS */
static PetscErrorCode MatProductSetFromOptions_IS_XAIJ_PtAP(Mat C)
{
  PetscFunctionBegin;
  C->ops->productsymbolic = MatProductSymbolic_PtAP_IS_XAIJ;
  PetscFunctionReturn(PETSC_SUCCESS);
}
262 
263 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_IS_XAIJ(Mat C)
264 {
265   Mat_Product *product = C->product;
266 
267   PetscFunctionBegin;
268   if (product->type == MATPRODUCT_PtAP) PetscCall(MatProductSetFromOptions_IS_XAIJ_PtAP(C));
269   PetscFunctionReturn(PETSC_SUCCESS);
270 }
271 
272 /* ----------------------------------------- */
273 static PetscErrorCode MatISContainerDestroyFields_Private(void *ptr)
274 {
275   MatISLocalFields lf = (MatISLocalFields)ptr;
276   PetscInt         i;
277 
278   PetscFunctionBegin;
279   for (i = 0; i < lf->nr; i++) PetscCall(ISDestroy(&lf->rf[i]));
280   for (i = 0; i < lf->nc; i++) PetscCall(ISDestroy(&lf->cf[i]));
281   PetscCall(PetscFree2(lf->rf, lf->cf));
282   PetscCall(PetscFree(lf));
283   PetscFunctionReturn(PETSC_SUCCESS);
284 }
285 
286 static PetscErrorCode MatConvert_SeqXAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
287 {
288   Mat B, lB;
289 
290   PetscFunctionBegin;
291   if (reuse != MAT_REUSE_MATRIX) {
292     ISLocalToGlobalMapping rl2g, cl2g;
293     PetscInt               bs;
294     IS                     is;
295 
296     PetscCall(MatGetBlockSize(A, &bs));
297     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->n / bs, 0, 1, &is));
298     if (bs > 1) {
299       IS       is2;
300       PetscInt i, *aux;
301 
302       PetscCall(ISGetLocalSize(is, &i));
303       PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
304       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
305       PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
306       PetscCall(ISDestroy(&is));
307       is = is2;
308     }
309     PetscCall(ISSetIdentity(is));
310     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
311     PetscCall(ISDestroy(&is));
312     PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->n / bs, 0, 1, &is));
313     if (bs > 1) {
314       IS       is2;
315       PetscInt i, *aux;
316 
317       PetscCall(ISGetLocalSize(is, &i));
318       PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
319       PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, i, aux, PETSC_COPY_VALUES, &is2));
320       PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
321       PetscCall(ISDestroy(&is));
322       is = is2;
323     }
324     PetscCall(ISSetIdentity(is));
325     PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
326     PetscCall(ISDestroy(&is));
327     PetscCall(MatCreateIS(PetscObjectComm((PetscObject)A), bs, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N, rl2g, cl2g, &B));
328     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
329     PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
330     PetscCall(MatDuplicate(A, MAT_COPY_VALUES, &lB));
331     if (reuse == MAT_INITIAL_MATRIX) *newmat = B;
332   } else {
333     B = *newmat;
334     PetscCall(PetscObjectReference((PetscObject)A));
335     lB = A;
336   }
337   PetscCall(MatISSetLocalMat(B, lB));
338   PetscCall(MatDestroy(&lB));
339   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
340   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
341   if (reuse == MAT_INPLACE_MATRIX) PetscCall(MatHeaderReplace(A, &B));
342   PetscFunctionReturn(PETSC_SUCCESS);
343 }
344 
/*
   Scale the local matrix entries so that the implicit (unassembled) sum over
   subdomains reproduces the assembled operator: entry (i,j) is divided by the
   number of subdomains that share both endpoint nodes i and j.
*/
static PetscErrorCode MatISScaleDisassembling_Private(Mat A)
{
  Mat_IS         *matis = (Mat_IS *)(A->data);
  PetscScalar    *aa;
  const PetscInt *ii, *jj;
  PetscInt        i, n, m;
  PetscInt       *ecount, **eneighs;
  PetscBool       flg;

  PetscFunctionBegin;
  PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
  /* ecount[i]: number of processes sharing local node i; eneighs[i]: their ranks */
  PetscCall(ISLocalToGlobalMappingGetNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
  PetscCheck(m == n, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected %" PetscInt_FMT " != %" PetscInt_FMT, m, n);
  PetscCall(MatSeqAIJGetArray(matis->A, &aa));
  for (i = 0; i < n; i++) {
    if (ecount[i] > 1) { /* rows attached to a node owned by a single process need no scaling */
      PetscInt j;

      for (j = ii[i]; j < ii[i + 1]; j++) {
        PetscInt  i2   = jj[j], p, p2;
        PetscReal scal = 0.0;

        /* scal = size of the intersection of the sharing sets of nodes i and i2 */
        for (p = 0; p < ecount[i]; p++) {
          for (p2 = 0; p2 < ecount[i2]; p2++) {
            if (eneighs[i][p] == eneighs[i2][p2]) {
              scal += 1.0;
              break;
            }
          }
        }
        if (scal) aa[j] /= scal;
      }
    }
  }
  PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(matis->rmapping, &n, &ecount, &eneighs));
  PetscCall(MatSeqAIJRestoreArray(matis->A, &aa));
  PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &m, &ii, &jj, &flg));
  PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
  PetscFunctionReturn(PETSC_SUCCESS);
}
386 
/* Strategy used by MatMPIXAIJComputeLocalToGlobalMapping_Private to build the
   local-to-global map when disassembling an assembled AIJ/BAIJ matrix */
typedef enum {
  MAT_IS_DISASSEMBLE_L2G_NATURAL, /* locally owned columns plus off-process coupling columns */
  MAT_IS_DISASSEMBLE_L2G_MAT,     /* reuse the mapping already attached to the matrix */
  MAT_IS_DISASSEMBLE_L2G_ND       /* overlapping subdomains from a nested-dissection partitioning */
} MatISDisassemblel2gType;
392 
/*
   Compute the local-to-global mapping used to disassemble an assembled
   MPIAIJ/MPIBAIJ matrix A into MATIS format. The strategy is selected with
   -mat_is_disassemble_l2g_type:
     NATURAL - owned columns followed by ghost columns (default);
     MAT     - reuse the mapping already attached to A;
     ND      - overlapping subdomains from a nested-dissection partitioning,
               with a repair step for separator nodes ending up unshared.
*/
static PetscErrorCode MatMPIXAIJComputeLocalToGlobalMapping_Private(Mat A, ISLocalToGlobalMapping *l2g)
{
  Mat                     Ad, Ao;
  IS                      is, ndmap, ndsub;
  MPI_Comm                comm;
  const PetscInt         *garray, *ndmapi;
  PetscInt                bs, i, cnt, nl, *ncount, *ndmapc;
  PetscBool               ismpiaij, ismpibaij;
  const char *const       MatISDisassemblel2gTypes[] = {"NATURAL", "MAT", "ND", "MatISDisassemblel2gType", "MAT_IS_DISASSEMBLE_L2G_", NULL};
  MatISDisassemblel2gType mode                       = MAT_IS_DISASSEMBLE_L2G_NATURAL;
  MatPartitioning         part;
  PetscSF                 sf;
  PetscObject             dm;

  PetscFunctionBegin;
  PetscOptionsBegin(PetscObjectComm((PetscObject)A), ((PetscObject)A)->prefix, "MatIS l2g disassembling options", "Mat");
  PetscCall(PetscOptionsEnum("-mat_is_disassemble_l2g_type", "Type of local-to-global mapping to be used for disassembling", "MatISDisassemblel2gType", MatISDisassemblel2gTypes, (PetscEnum)mode, (PetscEnum *)&mode, NULL));
  PetscOptionsEnd();
  if (mode == MAT_IS_DISASSEMBLE_L2G_MAT) {
    /* trust the mapping the user already attached to A */
    PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
  PetscCall(MatGetBlockSize(A, &bs));
  switch (mode) {
  case MAT_IS_DISASSEMBLE_L2G_ND:
    /* overlapping subdomains built from a nested-dissection partitioning of A */
    PetscCall(MatPartitioningCreate(comm, &part));
    PetscCall(MatPartitioningSetAdjacency(part, A));
    PetscCall(PetscObjectSetOptionsPrefix((PetscObject)part, ((PetscObject)A)->prefix));
    PetscCall(MatPartitioningSetFromOptions(part));
    PetscCall(MatPartitioningApplyND(part, &ndmap));
    PetscCall(MatPartitioningDestroy(&part));
    PetscCall(ISBuildTwoSided(ndmap, NULL, &ndsub));
    PetscCall(MatMPIAIJSetUseScalableIncreaseOverlap(A, PETSC_TRUE));
    PetscCall(MatIncreaseOverlap(A, 1, &ndsub, 1));
    PetscCall(ISLocalToGlobalMappingCreateIS(ndsub, l2g));

    /* it may happen that a separator node is not properly shared */
    /* ncount[i]: number of processes sharing local node i of the tentative map */
    PetscCall(ISLocalToGlobalMappingGetNodeInfo(*l2g, &nl, &ncount, NULL));
    PetscCall(PetscSFCreate(comm, &sf));
    PetscCall(ISLocalToGlobalMappingGetIndices(*l2g, &garray));
    PetscCall(PetscSFSetGraphLayout(sf, A->rmap, nl, NULL, PETSC_OWN_POINTER, garray));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(*l2g, &garray));
    /* ndmapc[i]: sharing count reduced onto the owner of global row i */
    PetscCall(PetscCalloc1(A->rmap->n, &ndmapc));
    PetscCall(PetscSFReduceBegin(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
    PetscCall(PetscSFReduceEnd(sf, MPIU_INT, ncount, ndmapc, MPI_REPLACE));
    PetscCall(ISLocalToGlobalMappingRestoreNodeInfo(*l2g, NULL, &ncount, NULL));
    PetscCall(ISGetIndices(ndmap, &ndmapi));
    /* count owned separator nodes (negative ND map entry) shared by fewer than two subdomains */
    for (i = 0, cnt = 0; i < A->rmap->n; i++)
      if (ndmapi[i] < 0 && ndmapc[i] < 2) cnt++;

    PetscCall(MPIU_Allreduce(&cnt, &i, 1, MPIU_INT, MPI_MAX, comm));
    if (i) { /* we detected isolated separator nodes */
      Mat                    A2, A3;
      IS                    *workis, is2;
      PetscScalar           *vals;
      PetscInt               gcnt = i, *dnz, *onz, j, *lndmapi;
      ISLocalToGlobalMapping ll2g;
      PetscBool              flg;
      const PetscInt        *ii, *jj;

      /* communicate global id of separators */
      MatPreallocateBegin(comm, A->rmap->n, A->cmap->n, dnz, onz);
      /* dnz doubles as message buffer here: global row id for separators, -1 otherwise */
      for (i = 0, cnt = 0; i < A->rmap->n; i++) dnz[i] = ndmapi[i] < 0 ? i + A->rmap->rstart : -1;

      PetscCall(PetscMalloc1(nl, &lndmapi));
      PetscCall(PetscSFBcastBegin(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

      /* compute adjacency of isolated separators node */
      /* every process allocates gcnt (the global max) ISes so MatIncreaseOverlap can be called collectively */
      PetscCall(PetscMalloc1(gcnt, &workis));
      for (i = 0, cnt = 0; i < A->rmap->n; i++) {
        if (ndmapi[i] < 0 && ndmapc[i] < 2) PetscCall(ISCreateStride(comm, 1, i + A->rmap->rstart, 1, &workis[cnt++]));
      }
      for (i = cnt; i < gcnt; i++) PetscCall(ISCreateStride(comm, 0, 0, 1, &workis[i]));
      for (i = 0; i < gcnt; i++) {
        PetscCall(PetscObjectSetName((PetscObject)workis[i], "ISOLATED"));
        PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
      }

      /* no communications since all the ISes correspond to locally owned rows */
      PetscCall(MatIncreaseOverlap(A, gcnt, workis, 1));

      /* end communicate global id of separators */
      PetscCall(PetscSFBcastEnd(sf, MPIU_INT, dnz, lndmapi, MPI_REPLACE));

      /* communicate new layers : create a matrix and transpose it */
      PetscCall(PetscArrayzero(dnz, A->rmap->n));
      PetscCall(PetscArrayzero(onz, A->rmap->n));
      for (i = 0, j = 0; i < A->rmap->n; i++) {
        if (ndmapi[i] < 0 && ndmapc[i] < 2) {
          const PetscInt *idxs;
          PetscInt        s;

          PetscCall(ISGetLocalSize(workis[j], &s));
          PetscCall(ISGetIndices(workis[j], &idxs));
          PetscCall(MatPreallocateSet(i + A->rmap->rstart, s, idxs, dnz, onz));
          j++;
        }
      }
      PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);

      for (i = 0; i < gcnt; i++) {
        PetscCall(PetscObjectSetName((PetscObject)workis[i], "EXTENDED"));
        PetscCall(ISViewFromOptions(workis[i], NULL, "-view_isolated_separators"));
      }

      /* j = maximum row length, sized for the widest row of ones */
      for (i = 0, j = 0; i < A->rmap->n; i++) j = PetscMax(j, dnz[i] + onz[i]);
      PetscCall(PetscMalloc1(j, &vals));
      for (i = 0; i < j; i++) vals[i] = 1.0;

      PetscCall(MatCreate(comm, &A2));
      PetscCall(MatSetType(A2, MATMPIAIJ));
      PetscCall(MatSetSizes(A2, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
      PetscCall(MatMPIAIJSetPreallocation(A2, 0, dnz, 0, onz));
      PetscCall(MatSetOption(A2, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE));
      for (i = 0, j = 0; i < A2->rmap->n; i++) {
        PetscInt        row = i + A2->rmap->rstart, s = dnz[i] + onz[i];
        const PetscInt *idxs;

        if (s) {
          PetscCall(ISGetIndices(workis[j], &idxs));
          PetscCall(MatSetValues(A2, 1, &row, s, idxs, vals, INSERT_VALUES));
          PetscCall(ISRestoreIndices(workis[j], &idxs));
          j++;
        }
      }
      PetscCheck(j == cnt, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Unexpected local count %" PetscInt_FMT " != %" PetscInt_FMT, j, cnt);
      PetscCall(PetscFree(vals));
      PetscCall(MatAssemblyBegin(A2, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(A2, MAT_FINAL_ASSEMBLY));
      PetscCall(MatTranspose(A2, MAT_INPLACE_MATRIX, &A2));

      /* extract submatrix corresponding to the coupling "owned separators" x "isolated separators" */
      /* compact lndmapi in place: keep only the received separator ids */
      for (i = 0, j = 0; i < nl; i++)
        if (lndmapi[i] >= 0) lndmapi[j++] = lndmapi[i];
      PetscCall(ISCreateGeneral(comm, j, lndmapi, PETSC_USE_POINTER, &is));
      PetscCall(MatMPIAIJGetLocalMatCondensed(A2, MAT_INITIAL_MATRIX, &is, NULL, &A3));
      PetscCall(ISDestroy(&is));
      PetscCall(MatDestroy(&A2));

      /* extend local to global map to include connected isolated separators */
      PetscCall(PetscObjectQuery((PetscObject)A3, "_petsc_GetLocalMatCondensed_iscol", (PetscObject *)&is));
      PetscCheck(is, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Missing column map");
      PetscCall(ISLocalToGlobalMappingCreateIS(is, &ll2g));
      PetscCall(MatGetRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
      /* all columns occurring in A3, mapped to global ids through ll2g */
      PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ii[i], jj, PETSC_COPY_VALUES, &is));
      PetscCall(MatRestoreRowIJ(A3, 0, PETSC_FALSE, PETSC_FALSE, &i, &ii, &jj, &flg));
      PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
      PetscCall(ISLocalToGlobalMappingApplyIS(ll2g, is, &is2));
      PetscCall(ISDestroy(&is));
      PetscCall(ISLocalToGlobalMappingDestroy(&ll2g));

      /* add new nodes to the local-to-global map */
      PetscCall(ISLocalToGlobalMappingDestroy(l2g));
      PetscCall(ISExpand(ndsub, is2, &is));
      PetscCall(ISDestroy(&is2));
      PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
      PetscCall(ISDestroy(&is));

      PetscCall(MatDestroy(&A3));
      PetscCall(PetscFree(lndmapi));
      MatPreallocateEnd(dnz, onz);
      for (i = 0; i < gcnt; i++) PetscCall(ISDestroy(&workis[i]));
      PetscCall(PetscFree(workis));
    }
    PetscCall(ISRestoreIndices(ndmap, &ndmapi));
    PetscCall(PetscSFDestroy(&sf));
    PetscCall(PetscFree(ndmapc));
    PetscCall(ISDestroy(&ndmap));
    PetscCall(ISDestroy(&ndsub));
    PetscCall(ISLocalToGlobalMappingSetBlockSize(*l2g, bs));
    PetscCall(ISLocalToGlobalMappingViewFromOptions(*l2g, NULL, "-matis_nd_l2g_view"));
    break;
  case MAT_IS_DISASSEMBLE_L2G_NATURAL:
    PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_dm", (PetscObject *)&dm));
    if (dm) { /* if a matrix comes from a DM, most likely we can use the l2gmap if any */
      PetscCall(MatGetLocalToGlobalMapping(A, l2g, NULL));
      PetscCall(PetscObjectReference((PetscObject)*l2g));
      if (*l2g) PetscFunctionReturn(PETSC_SUCCESS);
    }
    if (ismpiaij) {
      PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
    } else if (ismpibaij) {
      PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
    } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
    if (A->rmap->n) {
      PetscInt dc, oc, stc, *aux;

      /* map: owned column blocks first, then ghost (off-process) column blocks via garray */
      PetscCall(MatGetLocalSize(Ad, NULL, &dc));
      PetscCall(MatGetLocalSize(Ao, NULL, &oc));
      PetscCheck(!oc || garray, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
      PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
      PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
      for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs;
      /* AIJ's garray stores scalar column ids, BAIJ's stores block ids: normalize to block ids */
      for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = (ismpiaij ? garray[i * bs] / bs : garray[i]);
      PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is));
    } else {
      /* no locally owned rows */
      PetscCall(ISCreateBlock(comm, 1, 0, NULL, PETSC_OWN_POINTER, &is));
    }
    PetscCall(ISLocalToGlobalMappingCreateIS(is, l2g));
    PetscCall(ISDestroy(&is));
    break;
  default:
    SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Unsupported l2g disassembling type %d", mode);
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
603 
604 PETSC_INTERN PetscErrorCode MatConvert_XAIJ_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
605 {
606   Mat                    lA, Ad, Ao, B = NULL;
607   ISLocalToGlobalMapping rl2g, cl2g;
608   IS                     is;
609   MPI_Comm               comm;
610   void                  *ptrs[2];
611   const char            *names[2] = {"_convert_csr_aux", "_convert_csr_data"};
612   const PetscInt        *garray;
613   PetscScalar           *dd, *od, *aa, *data;
614   const PetscInt        *di, *dj, *oi, *oj;
615   const PetscInt        *odi, *odj, *ooi, *ooj;
616   PetscInt              *aux, *ii, *jj;
617   PetscInt               bs, lc, dr, dc, oc, str, stc, nnz, i, jd, jo, cum;
618   PetscBool              flg, ismpiaij, ismpibaij, was_inplace = PETSC_FALSE;
619   PetscMPIInt            size;
620 
621   PetscFunctionBegin;
622   PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
623   PetscCallMPI(MPI_Comm_size(comm, &size));
624   if (size == 1) {
625     PetscCall(MatConvert_SeqXAIJ_IS(A, type, reuse, newmat));
626     PetscFunctionReturn(PETSC_SUCCESS);
627   }
628   if (reuse != MAT_REUSE_MATRIX && A->cmap->N == A->rmap->N) {
629     PetscCall(MatMPIXAIJComputeLocalToGlobalMapping_Private(A, &rl2g));
630     PetscCall(MatCreate(comm, &B));
631     PetscCall(MatSetType(B, MATIS));
632     PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
633     PetscCall(MatSetLocalToGlobalMapping(B, rl2g, rl2g));
634     PetscCall(MatGetBlockSize(A, &bs));
635     PetscCall(MatSetBlockSize(B, bs));
636     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
637     if (reuse == MAT_INPLACE_MATRIX) was_inplace = PETSC_TRUE;
638     reuse = MAT_REUSE_MATRIX;
639   }
640   if (reuse == MAT_REUSE_MATRIX) {
641     Mat            *newlA, lA;
642     IS              rows, cols;
643     const PetscInt *ridx, *cidx;
644     PetscInt        rbs, cbs, nr, nc;
645 
646     if (!B) B = *newmat;
647     PetscCall(MatISGetLocalToGlobalMapping(B, &rl2g, &cl2g));
648     PetscCall(ISLocalToGlobalMappingGetBlockIndices(rl2g, &ridx));
649     PetscCall(ISLocalToGlobalMappingGetBlockIndices(cl2g, &cidx));
650     PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &nr));
651     PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &nc));
652     PetscCall(ISLocalToGlobalMappingGetBlockSize(rl2g, &rbs));
653     PetscCall(ISLocalToGlobalMappingGetBlockSize(cl2g, &cbs));
654     PetscCall(ISCreateBlock(comm, rbs, nr / rbs, ridx, PETSC_USE_POINTER, &rows));
655     if (rl2g != cl2g) {
656       PetscCall(ISCreateBlock(comm, cbs, nc / cbs, cidx, PETSC_USE_POINTER, &cols));
657     } else {
658       PetscCall(PetscObjectReference((PetscObject)rows));
659       cols = rows;
660     }
661     PetscCall(MatISGetLocalMat(B, &lA));
662     PetscCall(MatCreateSubMatrices(A, 1, &rows, &cols, MAT_INITIAL_MATRIX, &newlA));
663     PetscCall(MatConvert(newlA[0], MATSEQAIJ, MAT_INPLACE_MATRIX, &newlA[0]));
664     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(rl2g, &ridx));
665     PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(cl2g, &cidx));
666     PetscCall(ISDestroy(&rows));
667     PetscCall(ISDestroy(&cols));
668     if (!lA->preallocated) { /* first time */
669       PetscCall(MatDuplicate(newlA[0], MAT_COPY_VALUES, &lA));
670       PetscCall(MatISSetLocalMat(B, lA));
671       PetscCall(PetscObjectDereference((PetscObject)lA));
672     }
673     PetscCall(MatCopy(newlA[0], lA, SAME_NONZERO_PATTERN));
674     PetscCall(MatDestroySubMatrices(1, &newlA));
675     PetscCall(MatISScaleDisassembling_Private(B));
676     PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
677     PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
678     if (was_inplace) PetscCall(MatHeaderReplace(A, &B));
679     else *newmat = B;
680     PetscFunctionReturn(PETSC_SUCCESS);
681   }
682   /* rectangular case, just compress out the column space */
683   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIAIJ, &ismpiaij));
684   PetscCall(PetscObjectBaseTypeCompare((PetscObject)A, MATMPIBAIJ, &ismpibaij));
685   if (ismpiaij) {
686     bs = 1;
687     PetscCall(MatMPIAIJGetSeqAIJ(A, &Ad, &Ao, &garray));
688   } else if (ismpibaij) {
689     PetscCall(MatGetBlockSize(A, &bs));
690     PetscCall(MatMPIBAIJGetSeqBAIJ(A, &Ad, &Ao, &garray));
691     PetscCall(MatConvert(Ad, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ad));
692     PetscCall(MatConvert(Ao, MATSEQAIJ, MAT_INITIAL_MATRIX, &Ao));
693   } else SETERRQ(comm, PETSC_ERR_SUP, "Type %s", ((PetscObject)A)->type_name);
694   PetscCall(MatSeqAIJGetArray(Ad, &dd));
695   PetscCall(MatSeqAIJGetArray(Ao, &od));
696   PetscCheck(garray, comm, PETSC_ERR_ARG_WRONGSTATE, "garray not present");
697 
698   /* access relevant information from MPIAIJ */
699   PetscCall(MatGetOwnershipRange(A, &str, NULL));
700   PetscCall(MatGetOwnershipRangeColumn(A, &stc, NULL));
701   PetscCall(MatGetLocalSize(A, &dr, &dc));
702   PetscCall(MatGetLocalSize(Ao, NULL, &oc));
703   PetscCall(MatGetRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &di, &dj, &flg));
704   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
705   PetscCall(MatGetRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &oi, &oj, &flg));
706   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot get IJ structure");
707   nnz = di[dr] + oi[dr];
708   /* store original pointers to be restored later */
709   odi = di;
710   odj = dj;
711   ooi = oi;
712   ooj = oj;
713 
714   /* generate l2g maps for rows and cols */
715   PetscCall(ISCreateStride(comm, dr / bs, str / bs, 1, &is));
716   if (bs > 1) {
717     IS is2;
718 
719     PetscCall(ISGetLocalSize(is, &i));
720     PetscCall(ISGetIndices(is, (const PetscInt **)&aux));
721     PetscCall(ISCreateBlock(comm, bs, i, aux, PETSC_COPY_VALUES, &is2));
722     PetscCall(ISRestoreIndices(is, (const PetscInt **)&aux));
723     PetscCall(ISDestroy(&is));
724     is = is2;
725   }
726   PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
727   PetscCall(ISDestroy(&is));
728   if (dr) {
729     PetscCall(PetscMalloc1((dc + oc) / bs, &aux));
730     for (i = 0; i < dc / bs; i++) aux[i] = i + stc / bs;
731     for (i = 0; i < oc / bs; i++) aux[i + dc / bs] = garray[i];
732     PetscCall(ISCreateBlock(comm, bs, (dc + oc) / bs, aux, PETSC_OWN_POINTER, &is));
733     lc = dc + oc;
734   } else {
735     PetscCall(ISCreateBlock(comm, bs, 0, NULL, PETSC_OWN_POINTER, &is));
736     lc = 0;
737   }
738   PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
739   PetscCall(ISDestroy(&is));
740 
741   /* create MATIS object */
742   PetscCall(MatCreate(comm, &B));
743   PetscCall(MatSetSizes(B, dr, dc, PETSC_DECIDE, PETSC_DECIDE));
744   PetscCall(MatSetType(B, MATIS));
745   PetscCall(MatSetBlockSize(B, bs));
746   PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
747   PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
748   PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
749 
750   /* merge local matrices */
751   PetscCall(PetscMalloc1(nnz + dr + 1, &aux));
752   PetscCall(PetscMalloc1(nnz, &data));
753   ii  = aux;
754   jj  = aux + dr + 1;
755   aa  = data;
756   *ii = *(di++) + *(oi++);
757   for (jd = 0, jo = 0, cum = 0; *ii < nnz; cum++) {
758     for (; jd < *di; jd++) {
759       *jj++ = *dj++;
760       *aa++ = *dd++;
761     }
762     for (; jo < *oi; jo++) {
763       *jj++ = *oj++ + dc;
764       *aa++ = *od++;
765     }
766     *(++ii) = *(di++) + *(oi++);
767   }
768   for (; cum < dr; cum++) *(++ii) = nnz;
769 
770   PetscCall(MatRestoreRowIJ(Ad, 0, PETSC_FALSE, PETSC_FALSE, &i, &odi, &odj, &flg));
771   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
772   PetscCall(MatRestoreRowIJ(Ao, 0, PETSC_FALSE, PETSC_FALSE, &i, &ooi, &ooj, &flg));
773   PetscCheck(flg, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot restore IJ structure");
774   PetscCall(MatSeqAIJRestoreArray(Ad, &dd));
775   PetscCall(MatSeqAIJRestoreArray(Ao, &od));
776 
777   ii = aux;
778   jj = aux + dr + 1;
779   aa = data;
780   PetscCall(MatCreateSeqAIJWithArrays(PETSC_COMM_SELF, dr, lc, ii, jj, aa, &lA));
781 
782   /* create containers to destroy the data */
783   ptrs[0] = aux;
784   ptrs[1] = data;
785   for (i = 0; i < 2; i++) {
786     PetscContainer c;
787 
788     PetscCall(PetscContainerCreate(PETSC_COMM_SELF, &c));
789     PetscCall(PetscContainerSetPointer(c, ptrs[i]));
790     PetscCall(PetscContainerSetUserDestroy(c, PetscContainerUserDestroyDefault));
791     PetscCall(PetscObjectCompose((PetscObject)lA, names[i], (PetscObject)c));
792     PetscCall(PetscContainerDestroy(&c));
793   }
794   if (ismpibaij) { /* destroy converted local matrices */
795     PetscCall(MatDestroy(&Ad));
796     PetscCall(MatDestroy(&Ao));
797   }
798 
799   /* finalize matrix */
800   PetscCall(MatISSetLocalMat(B, lA));
801   PetscCall(MatDestroy(&lA));
802   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
803   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
804   if (reuse == MAT_INPLACE_MATRIX) {
805     PetscCall(MatHeaderReplace(A, &B));
806   } else *newmat = B;
807   PetscFunctionReturn(PETSC_SUCCESS);
808 }
809 
/* Convert a MATNEST matrix, whose (non-NULL) blocks are all of type MATIS
   (or transposes of MATIS), into a single MATIS matrix.

   The local matrix of the result is itself a MATNEST built from the local
   matrices of the blocks; row/column local-to-global maps are obtained by
   broadcasting the nest ISs through the blocks' star forests.

   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX and MAT_INPLACE_MATRIX.
   With -matis_convert_local_nest the local MATNEST is further converted to
   MATAIJ and the field information is attached to the result. */
PETSC_INTERN PetscErrorCode MatConvert_Nest_IS(Mat A, MatType type, MatReuse reuse, Mat *newmat)
{
  Mat                  **nest, *snest, **rnest, lA, B;
  IS                    *iscol, *isrow, *islrow, *islcol;
  ISLocalToGlobalMapping rl2g, cl2g;
  MPI_Comm               comm;
  PetscInt              *lr, *lc, *l2gidxs;
  PetscInt               i, j, nr, nc, rbs, cbs;
  PetscBool              convert, lreuse, *istrans;

  PetscFunctionBegin;
  PetscCall(MatNestGetSubMats(A, &nr, &nc, &nest));
  lreuse = PETSC_FALSE;
  rnest  = NULL;
  if (reuse == MAT_REUSE_MATRIX) {
    PetscBool ismatis, isnest;

    PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
    PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_USER, "Cannot reuse matrix of type %s", ((PetscObject)(*newmat))->type_name);
    PetscCall(MatISGetLocalMat(*newmat, &lA));
    PetscCall(PetscObjectTypeCompare((PetscObject)lA, MATNEST, &isnest));
    if (isnest) {
      /* the local MATNEST can be reused only if its block layout matches A's */
      PetscCall(MatNestGetSubMats(lA, &i, &j, &rnest));
      lreuse = (PetscBool)(i == nr && j == nc);
      if (!lreuse) rnest = NULL;
    }
  }
  PetscCall(PetscObjectGetComm((PetscObject)A, &comm));
  PetscCall(PetscCalloc2(nr, &lr, nc, &lc));
  PetscCall(PetscCalloc6(nr, &isrow, nc, &iscol, nr, &islrow, nc, &islcol, nr * nc, &snest, nr * nc, &istrans));
  PetscCall(MatNestGetISs(A, isrow, iscol));
  /* collect the local matrices of all blocks and validate local sizes */
  for (i = 0; i < nr; i++) {
    for (j = 0; j < nc; j++) {
      PetscBool ismatis;
      PetscInt  l1, l2, lb1, lb2, ij = i * nc + j;

      /* Null matrix pointers are allowed in MATNEST */
      if (!nest[i][j]) continue;

      /* Nested matrices should be of type MATIS */
      PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATTRANSPOSEVIRTUAL, &istrans[ij]));
      if (istrans[ij]) {
        Mat T, lT;
        PetscCall(MatTransposeGetMat(nest[i][j], &T));
        PetscCall(PetscObjectTypeCompare((PetscObject)T, MATIS, &ismatis));
        PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") (transposed) is not of type MATIS", i, j);
        PetscCall(MatISGetLocalMat(T, &lT));
        /* snest[ij] owns a new virtual transpose of the local matrix; destroyed below */
        PetscCall(MatCreateTranspose(lT, &snest[ij]));
      } else {
        PetscCall(PetscObjectTypeCompare((PetscObject)nest[i][j], MATIS, &ismatis));
        PetscCheck(ismatis, comm, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") is not of type MATIS", i, j);
        PetscCall(MatISGetLocalMat(nest[i][j], &snest[ij]));
      }

      /* Check compatibility of local sizes */
      PetscCall(MatGetSize(snest[ij], &l1, &l2));
      PetscCall(MatGetBlockSizes(snest[ij], &lb1, &lb2));
      if (!l1 || !l2) continue;
      PetscCheck(!lr[i] || l1 == lr[i], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lr[i], l1);
      PetscCheck(!lc[j] || l2 == lc[j], PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid local size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, lc[j], l2);
      lr[i] = l1;
      lc[j] = l2;

      /* check compatibility for local matrix reusage: a block may not change
         between NULL and non-NULL when reusing the local MATNEST */
      if (rnest && !rnest[i][j] != !snest[ij]) lreuse = PETSC_FALSE;
    }
  }

  if (PetscDefined(USE_DEBUG)) {
    /* Check compatibility of l2g maps for rows: all blocks in a nest row must
       share the same (non-empty) row l2g map */
    for (i = 0; i < nr; i++) {
      rl2g = NULL;
      for (j = 0; j < nc; j++) {
        PetscInt n1, n2;

        if (!nest[i][j]) continue;
        if (istrans[i * nc + j]) {
          Mat T;

          /* for a transposed block the row map is the column map of the wrapped matrix */
          PetscCall(MatTransposeGetMat(nest[i][j], &T));
          PetscCall(MatISGetLocalToGlobalMapping(T, NULL, &cl2g));
        } else {
          PetscCall(MatISGetLocalToGlobalMapping(nest[i][j], &cl2g, NULL));
        }
        PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
        if (!n1) continue;
        if (!rl2g) {
          rl2g = cl2g;
        } else {
          const PetscInt *idxs1, *idxs2;
          PetscBool       same;

          PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
          PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, i, j, n1, n2);
          PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
          PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
          PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
          PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
          PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
          PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid row l2gmap", i, j);
        }
      }
    }
    /* Check compatibility of l2g maps for columns (same logic, transposed) */
    for (i = 0; i < nc; i++) {
      rl2g = NULL;
      for (j = 0; j < nr; j++) {
        PetscInt n1, n2;

        if (!nest[j][i]) continue;
        if (istrans[j * nc + i]) {
          Mat T;

          PetscCall(MatTransposeGetMat(nest[j][i], &T));
          PetscCall(MatISGetLocalToGlobalMapping(T, &cl2g, NULL));
        } else {
          PetscCall(MatISGetLocalToGlobalMapping(nest[j][i], NULL, &cl2g));
        }
        PetscCall(ISLocalToGlobalMappingGetSize(cl2g, &n1));
        if (!n1) continue;
        if (!rl2g) {
          rl2g = cl2g;
        } else {
          const PetscInt *idxs1, *idxs2;
          PetscBool       same;

          PetscCall(ISLocalToGlobalMappingGetSize(rl2g, &n2));
          PetscCheck(n1 == n2, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap size %" PetscInt_FMT " != %" PetscInt_FMT, j, i, n1, n2);
          PetscCall(ISLocalToGlobalMappingGetIndices(cl2g, &idxs1));
          PetscCall(ISLocalToGlobalMappingGetIndices(rl2g, &idxs2));
          PetscCall(PetscArraycmp(idxs1, idxs2, n1, &same));
          PetscCall(ISLocalToGlobalMappingRestoreIndices(cl2g, &idxs1));
          PetscCall(ISLocalToGlobalMappingRestoreIndices(rl2g, &idxs2));
          PetscCheck(same, PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot convert from MATNEST to MATIS! Matrix block (%" PetscInt_FMT ",%" PetscInt_FMT ") has invalid column l2gmap", j, i);
        }
      }
    }
  }

  B = NULL;
  if (reuse != MAT_REUSE_MATRIX) {
    PetscInt stl;

    /* Create l2g map for the rows of the new matrix and index sets for the local MATNEST */
    for (i = 0, stl = 0; i < nr; i++) stl += lr[i];
    PetscCall(PetscMalloc1(stl, &l2gidxs));
    for (i = 0, stl = 0; i < nr; i++) {
      Mat             usedmat;
      Mat_IS         *matis;
      const PetscInt *idxs;

      /* local IS for local NEST */
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));

      /* l2gmap: take the first non-NULL block of nest row i */
      j       = 0;
      usedmat = nest[i][j];
      while (!usedmat && j < nc - 1) usedmat = nest[i][++j];
      PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid row mat");

      if (istrans[i * nc + j]) {
        Mat T;
        PetscCall(MatTransposeGetMat(usedmat, &T));
        usedmat = T;
      }
      matis = (Mat_IS *)(usedmat->data);
      PetscCall(ISGetIndices(isrow[i], &idxs));
      /* broadcast the nest row IS through the block's SF (column SF if the
         block is transposed) to get the local-to-global indices */
      if (istrans[i * nc + j]) {
        PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
      } else {
        PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
      }
      PetscCall(ISRestoreIndices(isrow[i], &idxs));
      stl += lr[i];
    }
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &rl2g));

    /* Create l2g map for columns of the new matrix and index sets for the local MATNEST */
    for (i = 0, stl = 0; i < nc; i++) stl += lc[i];
    PetscCall(PetscMalloc1(stl, &l2gidxs));
    for (i = 0, stl = 0; i < nc; i++) {
      Mat             usedmat;
      Mat_IS         *matis;
      const PetscInt *idxs;

      /* local IS for local NEST */
      PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));

      /* l2gmap: take the first non-NULL block of nest column i */
      j       = 0;
      usedmat = nest[j][i];
      while (!usedmat && j < nr - 1) usedmat = nest[++j][i];
      PetscCheck(usedmat, comm, PETSC_ERR_SUP, "Cannot find valid column mat");
      if (istrans[j * nc + i]) {
        Mat T;
        PetscCall(MatTransposeGetMat(usedmat, &T));
        usedmat = T;
      }
      matis = (Mat_IS *)(usedmat->data);
      PetscCall(ISGetIndices(iscol[i], &idxs));
      /* here a transposed block uses the row SF, a plain block the column SF */
      if (istrans[j * nc + i]) {
        PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
      } else {
        PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
        PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, idxs, l2gidxs + stl, MPI_REPLACE));
      }
      PetscCall(ISRestoreIndices(iscol[i], &idxs));
      stl += lc[i];
    }
    PetscCall(ISLocalToGlobalMappingCreate(comm, 1, stl, l2gidxs, PETSC_OWN_POINTER, &cl2g));

    /* Create MATIS */
    PetscCall(MatCreate(comm, &B));
    PetscCall(MatSetSizes(B, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
    PetscCall(MatGetBlockSizes(A, &rbs, &cbs));
    PetscCall(MatSetBlockSizes(B, rbs, cbs));
    PetscCall(MatSetType(B, MATIS));
    PetscCall(MatISSetLocalMatType(B, MATNEST));
    { /* hack : avoid setup of scatters */
      Mat_IS *matis     = (Mat_IS *)(B->data);
      matis->islocalref = PETSC_TRUE;
    }
    PetscCall(MatSetLocalToGlobalMapping(B, rl2g, cl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
    PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
    PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
    PetscCall(MatNestSetVecType(lA, VECNEST));
    /* the virtual transposes created above are now referenced by lA */
    for (i = 0; i < nr * nc; i++) {
      if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
    }
    PetscCall(MatISSetLocalMat(B, lA));
    PetscCall(MatDestroy(&lA));
    { /* hack : setup of scatters done here */
      Mat_IS *matis = (Mat_IS *)(B->data);

      matis->islocalref = PETSC_FALSE;
      PetscCall(MatISSetUpScatters_Private(B));
    }
    PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
    if (reuse == MAT_INPLACE_MATRIX) {
      PetscCall(MatHeaderReplace(A, &B));
    } else {
      *newmat = B;
    }
  } else {
    if (lreuse) {
      /* reuse the existing local MATNEST: just swap in the new blocks */
      PetscCall(MatISGetLocalMat(*newmat, &lA));
      for (i = 0; i < nr; i++) {
        for (j = 0; j < nc; j++) {
          if (snest[i * nc + j]) {
            PetscCall(MatNestSetSubMat(lA, i, j, snest[i * nc + j]));
            if (istrans[i * nc + j]) PetscCall(MatDestroy(&snest[i * nc + j]));
          }
        }
      }
    } else {
      /* rebuild the local MATNEST from scratch */
      PetscInt stl;
      for (i = 0, stl = 0; i < nr; i++) {
        PetscCall(ISCreateStride(PETSC_COMM_SELF, lr[i], stl, 1, &islrow[i]));
        stl += lr[i];
      }
      for (i = 0, stl = 0; i < nc; i++) {
        PetscCall(ISCreateStride(PETSC_COMM_SELF, lc[i], stl, 1, &islcol[i]));
        stl += lc[i];
      }
      PetscCall(MatCreateNest(PETSC_COMM_SELF, nr, islrow, nc, islcol, snest, &lA));
      for (i = 0; i < nr * nc; i++) {
        if (istrans[i]) PetscCall(MatDestroy(&snest[i]));
      }
      PetscCall(MatISSetLocalMat(*newmat, lA));
      PetscCall(MatDestroy(&lA));
    }
    PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
  }

  /* Optionally convert the local MATNEST to MATAIJ and attach the local field
     information to the resulting matrix */
  convert = PETSC_FALSE;
  PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)A)->prefix, "-matis_convert_local_nest", &convert, NULL));
  if (convert) {
    Mat              M;
    MatISLocalFields lf;
    PetscContainer   c;

    PetscCall(MatISGetLocalMat(*newmat, &lA));
    PetscCall(MatConvert(lA, MATAIJ, MAT_INITIAL_MATRIX, &M));
    PetscCall(MatISSetLocalMat(*newmat, M));
    PetscCall(MatDestroy(&M));

    /* attach local fields to the matrix */
    PetscCall(PetscNew(&lf));
    PetscCall(PetscMalloc2(nr, &lf->rf, nc, &lf->cf));
    for (i = 0; i < nr; i++) {
      PetscInt n, st;

      PetscCall(ISGetLocalSize(islrow[i], &n));
      PetscCall(ISStrideGetInfo(islrow[i], &st, NULL));
      PetscCall(ISCreateStride(comm, n, st, 1, &lf->rf[i]));
    }
    for (i = 0; i < nc; i++) {
      PetscInt n, st;

      PetscCall(ISGetLocalSize(islcol[i], &n));
      PetscCall(ISStrideGetInfo(islcol[i], &st, NULL));
      PetscCall(ISCreateStride(comm, n, st, 1, &lf->cf[i]));
    }
    lf->nr = nr;
    lf->nc = nc;
    PetscCall(PetscContainerCreate(PetscObjectComm((PetscObject)(*newmat)), &c));
    PetscCall(PetscContainerSetPointer(c, lf));
    PetscCall(PetscContainerSetUserDestroy(c, MatISContainerDestroyFields_Private));
    PetscCall(PetscObjectCompose((PetscObject)(*newmat), "_convert_nest_lfields", (PetscObject)c));
    PetscCall(PetscContainerDestroy(&c));
  }

  /* Free workspace */
  for (i = 0; i < nr; i++) PetscCall(ISDestroy(&islrow[i]));
  for (i = 0; i < nc; i++) PetscCall(ISDestroy(&islcol[i]));
  PetscCall(PetscFree6(isrow, iscol, islrow, islcol, snest, istrans));
  PetscCall(PetscFree2(lr, lc));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1136 
1137 static PetscErrorCode MatDiagonalScale_IS(Mat A, Vec l, Vec r)
1138 {
1139   Mat_IS            *matis = (Mat_IS *)A->data;
1140   Vec                ll, rr;
1141   const PetscScalar *Y, *X;
1142   PetscScalar       *x, *y;
1143 
1144   PetscFunctionBegin;
1145   if (l) {
1146     ll = matis->y;
1147     PetscCall(VecGetArrayRead(l, &Y));
1148     PetscCall(VecGetArray(ll, &y));
1149     PetscCall(PetscSFBcastBegin(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1150   } else {
1151     ll = NULL;
1152   }
1153   if (r) {
1154     rr = matis->x;
1155     PetscCall(VecGetArrayRead(r, &X));
1156     PetscCall(VecGetArray(rr, &x));
1157     PetscCall(PetscSFBcastBegin(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1158   } else {
1159     rr = NULL;
1160   }
1161   if (ll) {
1162     PetscCall(PetscSFBcastEnd(matis->sf, MPIU_SCALAR, Y, y, MPI_REPLACE));
1163     PetscCall(VecRestoreArrayRead(l, &Y));
1164     PetscCall(VecRestoreArray(ll, &y));
1165   }
1166   if (rr) {
1167     PetscCall(PetscSFBcastEnd(matis->csf, MPIU_SCALAR, X, x, MPI_REPLACE));
1168     PetscCall(VecRestoreArrayRead(r, &X));
1169     PetscCall(VecRestoreArray(rr, &x));
1170   }
1171   PetscCall(MatDiagonalScale(matis->A, ll, rr));
1172   PetscFunctionReturn(PETSC_SUCCESS);
1173 }
1174 
1175 static PetscErrorCode MatGetInfo_IS(Mat A, MatInfoType flag, MatInfo *ginfo)
1176 {
1177   Mat_IS        *matis = (Mat_IS *)A->data;
1178   MatInfo        info;
1179   PetscLogDouble isend[6], irecv[6];
1180   PetscInt       bs;
1181 
1182   PetscFunctionBegin;
1183   PetscCall(MatGetBlockSize(A, &bs));
1184   if (matis->A->ops->getinfo) {
1185     PetscCall(MatGetInfo(matis->A, MAT_LOCAL, &info));
1186     isend[0] = info.nz_used;
1187     isend[1] = info.nz_allocated;
1188     isend[2] = info.nz_unneeded;
1189     isend[3] = info.memory;
1190     isend[4] = info.mallocs;
1191   } else {
1192     isend[0] = 0.;
1193     isend[1] = 0.;
1194     isend[2] = 0.;
1195     isend[3] = 0.;
1196     isend[4] = 0.;
1197   }
1198   isend[5] = matis->A->num_ass;
1199   if (flag == MAT_LOCAL) {
1200     ginfo->nz_used      = isend[0];
1201     ginfo->nz_allocated = isend[1];
1202     ginfo->nz_unneeded  = isend[2];
1203     ginfo->memory       = isend[3];
1204     ginfo->mallocs      = isend[4];
1205     ginfo->assemblies   = isend[5];
1206   } else if (flag == MAT_GLOBAL_MAX) {
1207     PetscCall(MPIU_Allreduce(isend, irecv, 6, MPIU_PETSCLOGDOUBLE, MPI_MAX, PetscObjectComm((PetscObject)A)));
1208 
1209     ginfo->nz_used      = irecv[0];
1210     ginfo->nz_allocated = irecv[1];
1211     ginfo->nz_unneeded  = irecv[2];
1212     ginfo->memory       = irecv[3];
1213     ginfo->mallocs      = irecv[4];
1214     ginfo->assemblies   = irecv[5];
1215   } else if (flag == MAT_GLOBAL_SUM) {
1216     PetscCall(MPIU_Allreduce(isend, irecv, 5, MPIU_PETSCLOGDOUBLE, MPI_SUM, PetscObjectComm((PetscObject)A)));
1217 
1218     ginfo->nz_used      = irecv[0];
1219     ginfo->nz_allocated = irecv[1];
1220     ginfo->nz_unneeded  = irecv[2];
1221     ginfo->memory       = irecv[3];
1222     ginfo->mallocs      = irecv[4];
1223     ginfo->assemblies   = A->num_ass;
1224   }
1225   ginfo->block_size        = bs;
1226   ginfo->fill_ratio_given  = 0;
1227   ginfo->fill_ratio_needed = 0;
1228   ginfo->factor_mallocs    = 0;
1229   PetscFunctionReturn(PETSC_SUCCESS);
1230 }
1231 
1232 static PetscErrorCode MatTranspose_IS(Mat A, MatReuse reuse, Mat *B)
1233 {
1234   Mat C, lC, lA;
1235 
1236   PetscFunctionBegin;
1237   if (reuse == MAT_REUSE_MATRIX) PetscCall(MatTransposeCheckNonzeroState_Private(A, *B));
1238   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_INPLACE_MATRIX) {
1239     ISLocalToGlobalMapping rl2g, cl2g;
1240     PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &C));
1241     PetscCall(MatSetSizes(C, A->cmap->n, A->rmap->n, A->cmap->N, A->rmap->N));
1242     PetscCall(MatSetBlockSizes(C, PetscAbs(A->cmap->bs), PetscAbs(A->rmap->bs)));
1243     PetscCall(MatSetType(C, MATIS));
1244     PetscCall(MatGetLocalToGlobalMapping(A, &rl2g, &cl2g));
1245     PetscCall(MatSetLocalToGlobalMapping(C, cl2g, rl2g));
1246   } else C = *B;
1247 
1248   /* perform local transposition */
1249   PetscCall(MatISGetLocalMat(A, &lA));
1250   PetscCall(MatTranspose(lA, MAT_INITIAL_MATRIX, &lC));
1251   PetscCall(MatSetLocalToGlobalMapping(lC, lA->cmap->mapping, lA->rmap->mapping));
1252   PetscCall(MatISSetLocalMat(C, lC));
1253   PetscCall(MatDestroy(&lC));
1254 
1255   if (reuse == MAT_INITIAL_MATRIX || reuse == MAT_REUSE_MATRIX) {
1256     *B = C;
1257   } else {
1258     PetscCall(MatHeaderMerge(A, &C));
1259   }
1260   PetscCall(MatAssemblyBegin(*B, MAT_FINAL_ASSEMBLY));
1261   PetscCall(MatAssemblyEnd(*B, MAT_FINAL_ASSEMBLY));
1262   PetscFunctionReturn(PETSC_SUCCESS);
1263 }
1264 
1265 static PetscErrorCode MatDiagonalSet_IS(Mat A, Vec D, InsertMode insmode)
1266 {
1267   Mat_IS *is = (Mat_IS *)A->data;
1268 
1269   PetscFunctionBegin;
1270   if (D) { /* MatShift_IS pass D = NULL */
1271     PetscCall(VecScatterBegin(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1272     PetscCall(VecScatterEnd(is->rctx, D, is->y, INSERT_VALUES, SCATTER_FORWARD));
1273   }
1274   PetscCall(VecPointwiseDivide(is->y, is->y, is->counter));
1275   PetscCall(MatDiagonalSet(is->A, is->y, insmode));
1276   PetscFunctionReturn(PETSC_SUCCESS);
1277 }
1278 
1279 static PetscErrorCode MatShift_IS(Mat A, PetscScalar a)
1280 {
1281   Mat_IS *is = (Mat_IS *)A->data;
1282 
1283   PetscFunctionBegin;
1284   PetscCall(VecSet(is->y, a));
1285   PetscCall(MatDiagonalSet_IS(A, NULL, ADD_VALUES));
1286   PetscFunctionReturn(PETSC_SUCCESS);
1287 }
1288 
1289 static PetscErrorCode MatSetValuesLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1290 {
1291   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];
1292 
1293   PetscFunctionBegin;
1294   PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
1295   PetscCall(ISLocalToGlobalMappingApply(A->rmap->mapping, m, rows, rows_l));
1296   PetscCall(ISLocalToGlobalMappingApply(A->cmap->mapping, n, cols, cols_l));
1297   PetscCall(MatSetValuesLocal_IS(A, m, rows_l, n, cols_l, values, addv));
1298   PetscFunctionReturn(PETSC_SUCCESS);
1299 }
1300 
1301 static PetscErrorCode MatSetValuesBlockedLocal_SubMat_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
1302 {
1303   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];
1304 
1305   PetscFunctionBegin;
1306   PetscCheck(m <= MATIS_MAX_ENTRIES_INSERTION && n <= MATIS_MAX_ENTRIES_INSERTION, PETSC_COMM_SELF, PETSC_ERR_SUP, "Number of row/column block indices must be <= %d: they are %" PetscInt_FMT " %" PetscInt_FMT, MATIS_MAX_ENTRIES_INSERTION, m, n);
1307   PetscCall(ISLocalToGlobalMappingApplyBlock(A->rmap->mapping, m, rows, rows_l));
1308   PetscCall(ISLocalToGlobalMappingApplyBlock(A->cmap->mapping, n, cols, cols_l));
1309   PetscCall(MatSetValuesBlockedLocal_IS(A, m, rows_l, n, cols_l, values, addv));
1310   PetscFunctionReturn(PETSC_SUCCESS);
1311 }
1312 
1313 static PetscErrorCode MatCreateSubMatrix_IS(Mat mat, IS irow, IS icol, MatReuse scall, Mat *newmat)
1314 {
1315   Mat             locmat, newlocmat;
1316   Mat_IS         *newmatis;
1317   const PetscInt *idxs;
1318   PetscInt        i, m, n;
1319 
1320   PetscFunctionBegin;
1321   if (scall == MAT_REUSE_MATRIX) {
1322     PetscBool ismatis;
1323 
1324     PetscCall(PetscObjectTypeCompare((PetscObject)*newmat, MATIS, &ismatis));
1325     PetscCheck(ismatis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Not of MATIS type");
1326     newmatis = (Mat_IS *)(*newmat)->data;
1327     PetscCheck(newmatis->getsub_ris, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local row IS");
1328     PetscCheck(newmatis->getsub_cis, PetscObjectComm((PetscObject)*newmat), PETSC_ERR_ARG_WRONG, "Cannot reuse matrix! Misses local col IS");
1329   }
1330   /* irow and icol may not have duplicate entries */
1331   if (PetscDefined(USE_DEBUG)) {
1332     Vec                rtest, ltest;
1333     const PetscScalar *array;
1334 
1335     PetscCall(MatCreateVecs(mat, &ltest, &rtest));
1336     PetscCall(ISGetLocalSize(irow, &n));
1337     PetscCall(ISGetIndices(irow, &idxs));
1338     for (i = 0; i < n; i++) PetscCall(VecSetValue(rtest, idxs[i], 1.0, ADD_VALUES));
1339     PetscCall(VecAssemblyBegin(rtest));
1340     PetscCall(VecAssemblyEnd(rtest));
1341     PetscCall(VecGetLocalSize(rtest, &n));
1342     PetscCall(VecGetOwnershipRange(rtest, &m, NULL));
1343     PetscCall(VecGetArrayRead(rtest, &array));
1344     for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Irow may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1345     PetscCall(VecRestoreArrayRead(rtest, &array));
1346     PetscCall(ISRestoreIndices(irow, &idxs));
1347     PetscCall(ISGetLocalSize(icol, &n));
1348     PetscCall(ISGetIndices(icol, &idxs));
1349     for (i = 0; i < n; i++) PetscCall(VecSetValue(ltest, idxs[i], 1.0, ADD_VALUES));
1350     PetscCall(VecAssemblyBegin(ltest));
1351     PetscCall(VecAssemblyEnd(ltest));
1352     PetscCall(VecGetLocalSize(ltest, &n));
1353     PetscCall(VecGetOwnershipRange(ltest, &m, NULL));
1354     PetscCall(VecGetArrayRead(ltest, &array));
1355     for (i = 0; i < n; i++) PetscCheck(array[i] == 0. || array[i] == 1., PETSC_COMM_SELF, PETSC_ERR_SUP, "Index %" PetscInt_FMT " counted %" PetscInt_FMT " times! Icol may not have duplicate entries", i + m, (PetscInt)PetscRealPart(array[i]));
1356     PetscCall(VecRestoreArrayRead(ltest, &array));
1357     PetscCall(ISRestoreIndices(icol, &idxs));
1358     PetscCall(VecDestroy(&rtest));
1359     PetscCall(VecDestroy(&ltest));
1360   }
1361   if (scall == MAT_INITIAL_MATRIX) {
1362     Mat_IS                *matis = (Mat_IS *)mat->data;
1363     ISLocalToGlobalMapping rl2g;
1364     IS                     is;
1365     PetscInt              *lidxs, *lgidxs, *newgidxs;
1366     PetscInt               ll, newloc, irbs, icbs, arbs, acbs, rbs, cbs;
1367     PetscBool              cong;
1368     MPI_Comm               comm;
1369 
1370     PetscCall(PetscObjectGetComm((PetscObject)mat, &comm));
1371     PetscCall(MatGetBlockSizes(mat, &arbs, &acbs));
1372     PetscCall(ISGetBlockSize(irow, &irbs));
1373     PetscCall(ISGetBlockSize(icol, &icbs));
1374     rbs = arbs == irbs ? irbs : 1;
1375     cbs = acbs == icbs ? icbs : 1;
1376     PetscCall(ISGetLocalSize(irow, &m));
1377     PetscCall(ISGetLocalSize(icol, &n));
1378     PetscCall(MatCreate(comm, newmat));
1379     PetscCall(MatSetType(*newmat, MATIS));
1380     PetscCall(MatSetSizes(*newmat, m, n, PETSC_DECIDE, PETSC_DECIDE));
1381     PetscCall(MatSetBlockSizes(*newmat, rbs, cbs));
1382     /* communicate irow to their owners in the layout */
1383     PetscCall(ISGetIndices(irow, &idxs));
1384     PetscCall(PetscLayoutMapLocal(mat->rmap, m, idxs, &ll, &lidxs, &lgidxs));
1385     PetscCall(ISRestoreIndices(irow, &idxs));
1386     PetscCall(PetscArrayzero(matis->sf_rootdata, matis->sf->nroots));
1387     for (i = 0; i < ll; i++) matis->sf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1388     PetscCall(PetscFree(lidxs));
1389     PetscCall(PetscFree(lgidxs));
1390     PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1391     PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
1392     for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1393       if (matis->sf_leafdata[i]) newloc++;
1394     PetscCall(PetscMalloc1(newloc, &newgidxs));
1395     PetscCall(PetscMalloc1(newloc, &lidxs));
1396     for (i = 0, newloc = 0; i < matis->sf->nleaves; i++)
1397       if (matis->sf_leafdata[i]) {
1398         lidxs[newloc]      = i;
1399         newgidxs[newloc++] = matis->sf_leafdata[i] - 1;
1400       }
1401     PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1402     PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
1403     PetscCall(ISLocalToGlobalMappingSetBlockSize(rl2g, rbs));
1404     PetscCall(ISDestroy(&is));
1405     /* local is to extract local submatrix */
1406     newmatis = (Mat_IS *)(*newmat)->data;
1407     PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_ris));
1408     PetscCall(MatHasCongruentLayouts(mat, &cong));
1409     if (cong && irow == icol && matis->csf == matis->sf) {
1410       PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, rl2g));
1411       PetscCall(PetscObjectReference((PetscObject)newmatis->getsub_ris));
1412       newmatis->getsub_cis = newmatis->getsub_ris;
1413     } else {
1414       ISLocalToGlobalMapping cl2g;
1415 
1416       /* communicate icol to their owners in the layout */
1417       PetscCall(ISGetIndices(icol, &idxs));
1418       PetscCall(PetscLayoutMapLocal(mat->cmap, n, idxs, &ll, &lidxs, &lgidxs));
1419       PetscCall(ISRestoreIndices(icol, &idxs));
1420       PetscCall(PetscArrayzero(matis->csf_rootdata, matis->csf->nroots));
1421       for (i = 0; i < ll; i++) matis->csf_rootdata[lidxs[i]] = lgidxs[i] + 1;
1422       PetscCall(PetscFree(lidxs));
1423       PetscCall(PetscFree(lgidxs));
1424       PetscCall(PetscSFBcastBegin(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1425       PetscCall(PetscSFBcastEnd(matis->csf, MPIU_INT, matis->csf_rootdata, matis->csf_leafdata, MPI_REPLACE));
1426       for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1427         if (matis->csf_leafdata[i]) newloc++;
1428       PetscCall(PetscMalloc1(newloc, &newgidxs));
1429       PetscCall(PetscMalloc1(newloc, &lidxs));
1430       for (i = 0, newloc = 0; i < matis->csf->nleaves; i++)
1431         if (matis->csf_leafdata[i]) {
1432           lidxs[newloc]      = i;
1433           newgidxs[newloc++] = matis->csf_leafdata[i] - 1;
1434         }
1435       PetscCall(ISCreateGeneral(comm, newloc, newgidxs, PETSC_OWN_POINTER, &is));
1436       PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
1437       PetscCall(ISLocalToGlobalMappingSetBlockSize(cl2g, cbs));
1438       PetscCall(ISDestroy(&is));
1439       /* local is to extract local submatrix */
1440       PetscCall(ISCreateGeneral(comm, newloc, lidxs, PETSC_OWN_POINTER, &newmatis->getsub_cis));
1441       PetscCall(MatSetLocalToGlobalMapping(*newmat, rl2g, cl2g));
1442       PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
1443     }
1444     PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
1445   } else {
1446     PetscCall(MatISGetLocalMat(*newmat, &newlocmat));
1447   }
1448   PetscCall(MatISGetLocalMat(mat, &locmat));
1449   newmatis = (Mat_IS *)(*newmat)->data;
1450   PetscCall(MatCreateSubMatrix(locmat, newmatis->getsub_ris, newmatis->getsub_cis, scall, &newlocmat));
1451   if (scall == MAT_INITIAL_MATRIX) {
1452     PetscCall(MatISSetLocalMat(*newmat, newlocmat));
1453     PetscCall(MatDestroy(&newlocmat));
1454   }
1455   PetscCall(MatAssemblyBegin(*newmat, MAT_FINAL_ASSEMBLY));
1456   PetscCall(MatAssemblyEnd(*newmat, MAT_FINAL_ASSEMBLY));
1457   PetscFunctionReturn(PETSC_SUCCESS);
1458 }
1459 
1460 static PetscErrorCode MatCopy_IS(Mat A, Mat B, MatStructure str)
1461 {
1462   Mat_IS   *a = (Mat_IS *)A->data, *b;
1463   PetscBool ismatis;
1464 
1465   PetscFunctionBegin;
1466   PetscCall(PetscObjectTypeCompare((PetscObject)B, MATIS, &ismatis));
1467   PetscCheck(ismatis, PetscObjectComm((PetscObject)B), PETSC_ERR_SUP, "Need to be implemented");
1468   b = (Mat_IS *)B->data;
1469   PetscCall(MatCopy(a->A, b->A, str));
1470   PetscCall(PetscObjectStateIncrease((PetscObject)B));
1471   PetscFunctionReturn(PETSC_SUCCESS);
1472 }
1473 
1474 static PetscErrorCode MatMissingDiagonal_IS(Mat A, PetscBool *missing, PetscInt *d)
1475 {
1476   Vec                v;
1477   const PetscScalar *array;
1478   PetscInt           i, n;
1479 
1480   PetscFunctionBegin;
1481   *missing = PETSC_FALSE;
1482   PetscCall(MatCreateVecs(A, NULL, &v));
1483   PetscCall(MatGetDiagonal(A, v));
1484   PetscCall(VecGetLocalSize(v, &n));
1485   PetscCall(VecGetArrayRead(v, &array));
1486   for (i = 0; i < n; i++)
1487     if (array[i] == 0.) break;
1488   PetscCall(VecRestoreArrayRead(v, &array));
1489   PetscCall(VecDestroy(&v));
1490   if (i != n) *missing = PETSC_TRUE;
1491   if (d) {
1492     *d = -1;
1493     if (*missing) {
1494       PetscInt rstart;
1495       PetscCall(MatGetOwnershipRange(A, &rstart, NULL));
1496       *d = i + rstart;
1497     }
1498   }
1499   PetscFunctionReturn(PETSC_SUCCESS);
1500 }
1501 
/* Create the star forests used to move data between the global row/column
   layouts of B and the (possibly overlapping) local spaces described by the
   row/column local-to-global mappings. A row SF plus root/leaf work buffers
   is always built; a distinct column SF is built only when the column mapping
   differs from the row mapping, otherwise the column SF and its buffers alias
   the row ones (MatDestroy must account for this aliasing). Idempotent. */
static PetscErrorCode MatISSetUpSF_IS(Mat B)
{
  Mat_IS         *matis = (Mat_IS *)(B->data);
  const PetscInt *gidxs;
  PetscInt        nleaves;

  PetscFunctionBegin;
  if (matis->sf) PetscFunctionReturn(PETSC_SUCCESS); /* already set up */
  PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->sf));
  /* leaves are the entries of the local row space, roots the owned global rows */
  PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &gidxs));
  PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nleaves));
  PetscCall(PetscSFSetGraphLayout(matis->sf, B->rmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &gidxs));
  PetscCall(PetscMalloc2(matis->sf->nroots, &matis->sf_rootdata, matis->sf->nleaves, &matis->sf_leafdata));
  if (matis->rmapping != matis->cmapping) { /* setup SF for columns */
    PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nleaves));
    PetscCall(PetscSFCreate(PetscObjectComm((PetscObject)B), &matis->csf));
    PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &gidxs));
    PetscCall(PetscSFSetGraphLayout(matis->csf, B->cmap, nleaves, NULL, PETSC_OWN_POINTER, gidxs));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &gidxs));
    PetscCall(PetscMalloc2(matis->csf->nroots, &matis->csf_rootdata, matis->csf->nleaves, &matis->csf_leafdata));
  } else {
    /* square case with identical mappings: reuse the row SF and its buffers */
    matis->csf          = matis->sf;
    matis->csf_leafdata = matis->sf_leafdata;
    matis->csf_rootdata = matis->sf_rootdata;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1530 
1531 /*@
1532    MatISStoreL2L - Store local-to-local operators during the Galerkin process of computing MatPtAP.
1533 
1534    Collective
1535 
1536    Input Parameters:
1537 +  A - the matrix
1538 -  store - the boolean flag
1539 
1540    Level: advanced
1541 
1542 .seealso: `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatPtAP()`
1543 @*/
PetscErrorCode MatISStoreL2L(Mat A, PetscBool store)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidLogicalCollectiveBool(A, store, 2);
  /* dispatch to the type-specific implementation; silently a no-op for
     matrix types that do not provide MatISStoreL2L_C */
  PetscTryMethod(A, "MatISStoreL2L_C", (Mat, PetscBool), (A, store));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1553 
1554 static PetscErrorCode MatISStoreL2L_IS(Mat A, PetscBool store)
1555 {
1556   Mat_IS *matis = (Mat_IS *)(A->data);
1557 
1558   PetscFunctionBegin;
1559   matis->storel2l = store;
1560   if (!store) PetscCall(PetscObjectCompose((PetscObject)(A), "_MatIS_PtAP_l2l", NULL));
1561   PetscFunctionReturn(PETSC_SUCCESS);
1562 }
1563 
1564 /*@
1565    MatISFixLocalEmpty - Compress out zero local rows from the local matrices
1566 
1567    Collective
1568 
1569    Input Parameters:
1570 +  A - the matrix
1571 -  fix - the boolean flag
1572 
1573    Level: advanced
1574 
1575    Note:
1576    When fix is true, new local matrices and l2g maps are generated during the final assembly process.
1577 
1578 .seealso: `MATIS`, `MatCreate()`, `MatCreateIS()`, `MatISSetPreallocation()`, `MatAssemblyEnd()`, `MAT_FINAL_ASSEMBLY`
1579 @*/
PetscErrorCode MatISFixLocalEmpty(Mat A, PetscBool fix)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  PetscValidLogicalCollectiveBool(A, fix, 2);
  /* dispatch to the type-specific implementation; silently a no-op for
     matrix types that do not provide MatISFixLocalEmpty_C */
  PetscTryMethod(A, "MatISFixLocalEmpty_C", (Mat, PetscBool), (A, fix));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1589 
/* MATIS implementation of MatISFixLocalEmpty: only records the flag here;
   per the man page above, the compression of zero local rows (new local
   matrices and l2g maps) happens during final assembly. */
static PetscErrorCode MatISFixLocalEmpty_IS(Mat A, PetscBool fix)
{
  Mat_IS *matis = (Mat_IS *)(A->data);

  PetscFunctionBegin;
  matis->locempty = fix;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1598 
1599 /*@
1600    MatISSetPreallocation - Preallocates memory for a `MATIS` parallel matrix.
1601 
1602    Collective
1603 
1604    Input Parameters:
1605 +  B - the matrix
1606 .  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
1607            (same value is used for all local rows)
1608 .  d_nnz - array containing the number of nonzeros in the various rows of the
1609            DIAGONAL portion of the local submatrix (possibly different for each row)
1610            or NULL, if d_nz is used to specify the nonzero structure.
1611            The size of this array is equal to the number of local rows, i.e 'm'.
1612            For matrices that will be factored, you must leave room for (and set)
1613            the diagonal entry even if it is zero.
1614 .  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
1615            submatrix (same value is used for all local rows).
1616 -  o_nnz - array containing the number of nonzeros in the various rows of the
1617            OFF-DIAGONAL portion of the local submatrix (possibly different for
1618            each row) or NULL, if o_nz is used to specify the nonzero
1619            structure. The size of this array is equal to the number
1620            of local rows, i.e 'm'.
1621 
1622    If the *_nnz parameter is given then the *_nz parameter is ignored
1623 
1624    Level: intermediate
1625 
1626    Note:
1627    This function has the same interface as the `MATMPIAIJ` preallocation routine in order to simplify the transition
1628    from the assembled format to the unassembled one. It overestimates the preallocation of `MATIS` local
1629    matrices; for exact preallocation, the user should set the preallocation directly on local matrix objects.
1630 
1631 .seealso: `MatCreate()`, `MatCreateIS()`, `MatMPIAIJSetPreallocation()`, `MatISGetLocalMat()`, `MATIS`
1632 @*/
PetscErrorCode MatISSetPreallocation(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B, MAT_CLASSID, 1);
  PetscValidType(B, 1);
  /* dispatch to the type-specific implementation; silently a no-op for
     matrix types that do not provide MatISSetPreallocation_C */
  PetscTryMethod(B, "MatISSetPreallocation_C", (Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]), (B, d_nz, d_nnz, o_nz, o_nnz));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1641 
/* this is used by DMDA */
/* MATIS implementation of MatISSetPreallocation: takes MPIAIJ-style
   diagonal/off-diagonal estimates given in terms of the parallel layout,
   communicates them to the overlapping local spaces through the row SF, and
   preallocates the local matrix for the AIJ, BAIJ and SBAIJ formats (plus
   HYPRE when available). The estimates are overestimates by construction. */
PETSC_EXTERN PetscErrorCode MatISSetPreallocation_IS(Mat B, PetscInt d_nz, const PetscInt d_nnz[], PetscInt o_nz, const PetscInt o_nnz[])
{
  Mat_IS  *matis = (Mat_IS *)(B->data);
  PetscInt bs, i, nlocalcols;

  PetscFunctionBegin;
  PetscCall(MatSetUp(B)); /* ensures the SF and its work buffers exist */
  /* total per-owned-row estimate = diagonal part + off-diagonal part;
     per-row arrays take precedence over the scalar values */
  if (!d_nnz)
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nz;
  else
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] = d_nnz[i];

  if (!o_nnz)
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nz;
  else
    for (i = 0; i < matis->sf->nroots; i++) matis->sf_rootdata[i] += o_nnz[i];

  /* broadcast owners -> local spaces; independent queries overlap the communication */
  PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  PetscCall(MatGetSize(matis->A, NULL, &nlocalcols));
  PetscCall(MatGetBlockSize(matis->A, &bs));
  PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));

  /* AIJ: a local row cannot hold more nonzeros than there are local columns */
  for (i = 0; i < matis->sf->nleaves; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols);
  PetscCall(MatSeqAIJSetPreallocation(matis->A, 0, matis->sf_leafdata));
#if defined(PETSC_HAVE_HYPRE)
  PetscCall(MatHYPRESetPreallocation(matis->A, 0, matis->sf_leafdata, 0, NULL));
#endif

  /* BAIJ: compress per-row counts into per-block-row counts, reusing the
     leaf buffer in place (reads index i*bs+b, writes index i; safe since i <= i*bs) */
  for (i = 0; i < matis->sf->nleaves / bs; i++) {
    PetscInt b;

    matis->sf_leafdata[i] = matis->sf_leafdata[i * bs] / bs;
    for (b = 1; b < bs; b++) matis->sf_leafdata[i] = PetscMax(matis->sf_leafdata[i], matis->sf_leafdata[i * bs + b] / bs);
  }
  PetscCall(MatSeqBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

  /* SBAIJ stores the upper triangle only: block row i has at most nlocalcols-i block entries */
  nlocalcols /= bs;
  for (i = 0; i < matis->sf->nleaves / bs; i++) matis->sf_leafdata[i] = PetscMin(matis->sf_leafdata[i], nlocalcols - i);
  PetscCall(MatSeqSBAIJSetPreallocation(matis->A, bs, 0, matis->sf_leafdata));

  /* for other matrix types */
  PetscCall(MatSetUp(matis->A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1687 
/* Compute and set the preallocation of the assembled matrix B from the
   unassembled MATIS matrix A. Each process counts, per local row of its
   subdomain matrix, the exact contributions falling in the diagonal and
   off-diagonal blocks of the final parallel layout; these per-subdomain
   counts are then combined on shared rows through the row SF (summed, or
   maxed when maxreduce is true), clipped, and used to preallocate B for
   the AIJ, BAIJ and SBAIJ formats. The result is generally an overestimate. */
PETSC_EXTERN PetscErrorCode MatISSetMPIXAIJPreallocation_Private(Mat A, Mat B, PetscBool maxreduce)
{
  Mat_IS         *matis = (Mat_IS *)(A->data);
  PetscInt       *my_dnz, *my_onz, *dnz, *onz, *mat_ranges, *row_ownership;
  const PetscInt *global_indices_r, *global_indices_c;
  PetscInt        i, j, bs, rows, cols;
  PetscInt        lrows, lcols;
  PetscInt        local_rows, local_cols;
  PetscMPIInt     size;
  PetscBool       isdense, issbaij;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
  PetscCall(MatGetSize(A, &rows, &cols));
  PetscCall(MatGetBlockSize(A, &bs));
  PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isdense));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &issbaij));
  PetscCall(ISLocalToGlobalMappingGetIndices(matis->rmapping, &global_indices_r));
  if (matis->rmapping != matis->cmapping) {
    PetscCall(ISLocalToGlobalMappingGetIndices(matis->cmapping, &global_indices_c));
  } else global_indices_c = global_indices_r; /* square case: reuse row indices */

  /* SBAIJ stores only the upper triangle; expose full rows for counting */
  if (issbaij) PetscCall(MatGetRowUpperTriangular(matis->A));
  /*
     An SF reduce is needed to sum up properly on shared rows.
     Note that generally preallocation is not exact, since it overestimates nonzeros
  */
  PetscCall(MatGetLocalSize(A, &lrows, &lcols));
  MatPreallocateBegin(PetscObjectComm((PetscObject)A), lrows, lcols, dnz, onz);
  /* All processes need to compute entire row ownership */
  PetscCall(PetscMalloc1(rows, &row_ownership));
  PetscCall(MatGetOwnershipRanges(A, (const PetscInt **)&mat_ranges));
  for (i = 0; i < size; i++) {
    for (j = mat_ranges[i]; j < mat_ranges[i + 1]; j++) row_ownership[j] = i;
  }
  /* NOTE: from here on mat_ranges holds the COLUMN ownership ranges,
     used to classify an entry as diagonal- or off-diagonal-block */
  PetscCall(MatGetOwnershipRangesColumn(A, (const PetscInt **)&mat_ranges));

  /*
     my_dnz and my_onz contains exact contribution to preallocation from each local mat
     then, they will be summed up properly. This way, preallocation is always sufficient
  */
  PetscCall(PetscCalloc2(local_rows, &my_dnz, local_rows, &my_onz));
  /* preallocation as a MATAIJ */
  if (isdense) { /* special case for dense local matrices */
    for (i = 0; i < local_rows; i++) {
      PetscInt owner = row_ownership[global_indices_r[i]];
      for (j = 0; j < local_cols; j++) {
        PetscInt index_col = global_indices_c[j];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
      }
    }
  } else if (matis->A->ops->getrowij) { /* fast path: walk the CSR structure directly */
    const PetscInt *ii, *jj, *jptr;
    PetscBool       done;
    PetscCall(MatGetRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)(matis->A)), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
    jptr = jj;
    for (i = 0; i < local_rows; i++) {
      PetscInt index_row = global_indices_r[i];
      for (j = 0; j < ii[i + 1] - ii[i]; j++, jptr++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[*jptr];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          /* SBAIJ rows list the upper triangle only: account for the
             symmetric (transposed) entry in the column's local row as well */
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
            my_dnz[*jptr] += 1;
          } else {
            my_onz[*jptr] += 1;
          }
        }
      }
    }
    PetscCall(MatRestoreRowIJ(matis->A, 0, PETSC_FALSE, PETSC_FALSE, &local_rows, &ii, &jj, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)(matis->A)), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
  } else { /* loop over rows and use MatGetRow */
    for (i = 0; i < local_rows; i++) {
      const PetscInt *cols;
      PetscInt        ncols, index_row = global_indices_r[i];
      PetscCall(MatGetRow(matis->A, i, &ncols, &cols, NULL));
      for (j = 0; j < ncols; j++) {
        PetscInt owner     = row_ownership[index_row];
        PetscInt index_col = global_indices_c[cols[j]];
        if (index_col > mat_ranges[owner] - 1 && index_col < mat_ranges[owner + 1]) { /* diag block */
          my_dnz[i] += 1;
        } else { /* offdiag block */
          my_onz[i] += 1;
        }
        /* same as before, interchanging rows and cols */
        if (issbaij && index_col != index_row) {
          owner = row_ownership[index_col];
          if (index_row > mat_ranges[owner] - 1 && index_row < mat_ranges[owner + 1]) {
            my_dnz[cols[j]] += 1;
          } else {
            my_onz[cols[j]] += 1;
          }
        }
      }
      PetscCall(MatRestoreRow(matis->A, i, &ncols, &cols, NULL));
    }
  }
  if (global_indices_c != global_indices_r) PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->cmapping, &global_indices_c));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(matis->rmapping, &global_indices_r));
  PetscCall(PetscFree(row_ownership));

  /* Reduce my_dnz and my_onz */
  if (maxreduce) {
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_MAX));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_MAX));
  } else {
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
    PetscCall(PetscSFReduceBegin(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_dnz, dnz, MPI_SUM));
    PetscCall(PetscSFReduceEnd(matis->sf, MPIU_INT, my_onz, onz, MPI_SUM));
  }
  PetscCall(PetscFree2(my_dnz, my_onz));

  /* Resize preallocation if overestimated */
  for (i = 0; i < lrows; i++) {
    dnz[i] = PetscMin(dnz[i], lcols);
    onz[i] = PetscMin(onz[i], cols - lcols);
  }

  /* Set preallocation */
  PetscCall(MatSetBlockSizesFromMats(B, A, A));
  PetscCall(MatSeqAIJSetPreallocation(B, 0, dnz));
  PetscCall(MatMPIAIJSetPreallocation(B, 0, dnz, 0, onz));
  /* compress per-row counts into per-block-row counts in place for (S)BAIJ */
  for (i = 0; i < lrows; i += bs) {
    PetscInt b, d = dnz[i], o = onz[i];

    for (b = 1; b < bs; b++) {
      d = PetscMax(d, dnz[i + b]);
      o = PetscMax(o, onz[i + b]);
    }
    dnz[i / bs] = PetscMin(d / bs + d % bs, lcols / bs);
    onz[i / bs] = PetscMin(o / bs + o % bs, (cols - lcols) / bs);
  }
  PetscCall(MatSeqBAIJSetPreallocation(B, bs, 0, dnz));
  PetscCall(MatMPIBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
  PetscCall(MatMPISBAIJSetPreallocation(B, bs, 0, dnz, 0, onz));
  MatPreallocateEnd(dnz, onz);
  if (issbaij) PetscCall(MatRestoreRowUpperTriangular(matis->A));
  PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1845 
/* Convert a MATIS matrix to an assembled (X)AIJ format.

   Two paths are taken:
   - Sequential fast path: on one process, when the local matrix covers the
     whole global size and both l2g maps are one-to-one (permutations), the
     conversion reduces to converting the local matrix and permuting it with
     MatCreateSubMatrix; the permutation ISs are cached on the result for reuse.
   - General assembly: preallocate the target with
     MatISSetMPIXAIJPreallocation_Private and add the local values through
     MatSetValuesLocal, with specialized loops for dense and (converted-to-)AIJ
     local matrices.

   Supports MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX (shape/block sizes of *M are
   validated) and MAT_INPLACE_MATRIX (mat's header is replaced). */
PETSC_INTERN PetscErrorCode MatConvert_IS_XAIJ(Mat mat, MatType mtype, MatReuse reuse, Mat *M)
{
  Mat_IS            *matis = (Mat_IS *)(mat->data);
  Mat                local_mat, MT;
  PetscInt           rbs, cbs, rows, cols, lrows, lcols;
  PetscInt           local_rows, local_cols;
  PetscBool          isseqdense, isseqsbaij, isseqaij, isseqbaij;
  PetscMPIInt        size;
  const PetscScalar *array;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat), &size));
  if (size == 1 && mat->rmap->N == matis->A->rmap->N && mat->cmap->N == matis->A->cmap->N) {
    Mat      B;
    IS       irows = NULL, icols = NULL;
    PetscInt rbs, cbs; /* intentionally shadows the outer rbs/cbs for this branch */

    PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
    if (reuse != MAT_REUSE_MATRIX) { /* check if l2g maps are one-to-one */
      IS              rows, cols;
      const PetscInt *ridxs, *cidxs;
      PetscInt        i, nw, *work;

      /* count multiplicities of the row block indices: one-to-one iff each
         target index appears exactly once */
      PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->rmapping, &ridxs));
      PetscCall(ISLocalToGlobalMappingGetSize(matis->rmapping, &nw));
      nw = nw / rbs;
      PetscCall(PetscCalloc1(nw, &work));
      for (i = 0; i < nw; i++) work[ridxs[i]] += 1;
      for (i = 0; i < nw; i++)
        if (!work[i] || work[i] > 1) break;
      if (i == nw) { /* the row map is a permutation: build its inverse */
        PetscCall(ISCreateBlock(PETSC_COMM_SELF, rbs, nw, ridxs, PETSC_USE_POINTER, &rows));
        PetscCall(ISSetPermutation(rows));
        PetscCall(ISInvertPermutation(rows, PETSC_DECIDE, &irows));
        PetscCall(ISDestroy(&rows));
      }
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->rmapping, &ridxs));
      PetscCall(PetscFree(work));
      if (irows && matis->rmapping != matis->cmapping) {
        /* same one-to-one test and inverse construction for the column map */
        PetscCall(ISLocalToGlobalMappingGetBlockIndices(matis->cmapping, &cidxs));
        PetscCall(ISLocalToGlobalMappingGetSize(matis->cmapping, &nw));
        nw = nw / cbs;
        PetscCall(PetscCalloc1(nw, &work));
        for (i = 0; i < nw; i++) work[cidxs[i]] += 1;
        for (i = 0; i < nw; i++)
          if (!work[i] || work[i] > 1) break;
        if (i == nw) {
          PetscCall(ISCreateBlock(PETSC_COMM_SELF, cbs, nw, cidxs, PETSC_USE_POINTER, &cols));
          PetscCall(ISSetPermutation(cols));
          PetscCall(ISInvertPermutation(cols, PETSC_DECIDE, &icols));
          PetscCall(ISDestroy(&cols));
        }
        PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(matis->cmapping, &cidxs));
        PetscCall(PetscFree(work));
      } else if (irows) { /* identical mappings: share the inverse */
        PetscCall(PetscObjectReference((PetscObject)irows));
        icols = irows;
      }
    } else {
      /* reuse: retrieve the permutations cached on the previous result */
      PetscCall(PetscObjectQuery((PetscObject)(*M), "_MatIS_IS_XAIJ_irows", (PetscObject *)&irows));
      PetscCall(PetscObjectQuery((PetscObject)(*M), "_MatIS_IS_XAIJ_icols", (PetscObject *)&icols));
      if (irows) PetscCall(PetscObjectReference((PetscObject)irows));
      if (icols) PetscCall(PetscObjectReference((PetscObject)icols));
    }
    if (!irows || !icols) { /* maps not one-to-one: fall back to general assembly */
      PetscCall(ISDestroy(&icols));
      PetscCall(ISDestroy(&irows));
      goto general_assembly;
    }
    PetscCall(MatConvert(matis->A, mtype, MAT_INITIAL_MATRIX, &B));
    if (reuse != MAT_INPLACE_MATRIX) {
      PetscCall(MatCreateSubMatrix(B, irows, icols, reuse, M));
      /* cache the permutations for subsequent MAT_REUSE_MATRIX calls */
      PetscCall(PetscObjectCompose((PetscObject)(*M), "_MatIS_IS_XAIJ_irows", (PetscObject)irows));
      PetscCall(PetscObjectCompose((PetscObject)(*M), "_MatIS_IS_XAIJ_icols", (PetscObject)icols));
    } else {
      Mat C;

      PetscCall(MatCreateSubMatrix(B, irows, icols, MAT_INITIAL_MATRIX, &C));
      PetscCall(MatHeaderReplace(mat, &C));
    }
    PetscCall(MatDestroy(&B));
    PetscCall(ISDestroy(&icols));
    PetscCall(ISDestroy(&irows));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
general_assembly:
  PetscCall(MatGetSize(mat, &rows, &cols));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(matis->cmapping, &cbs));
  PetscCall(MatGetLocalSize(mat, &lrows, &lcols));
  PetscCall(MatGetSize(matis->A, &local_rows, &local_cols));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQDENSE, &isseqdense));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQAIJ, &isseqaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQBAIJ, &isseqbaij));
  PetscCall(PetscObjectBaseTypeCompare((PetscObject)matis->A, MATSEQSBAIJ, &isseqsbaij));
  PetscCheck(isseqdense || isseqaij || isseqbaij || isseqsbaij, PETSC_COMM_SELF, PETSC_ERR_SUP, "Not for matrix type %s", ((PetscObject)(matis->A))->type_name);
  if (PetscDefined(USE_DEBUG)) {
    /* debug-only: all processes must agree on the local matrix type */
    PetscBool lb[4], bb[4];

    lb[0] = isseqdense;
    lb[1] = isseqaij;
    lb[2] = isseqbaij;
    lb[3] = isseqsbaij;
    PetscCall(MPIU_Allreduce(lb, bb, 4, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)mat)));
    PetscCheck(bb[0] || bb[1] || bb[2] || bb[3], PETSC_COMM_SELF, PETSC_ERR_SUP, "Local matrices must have the same type");
  }

  if (reuse != MAT_REUSE_MATRIX) {
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &MT));
    PetscCall(MatSetSizes(MT, lrows, lcols, rows, cols));
    PetscCall(MatSetType(MT, mtype));
    PetscCall(MatSetBlockSizes(MT, rbs, cbs));
    PetscCall(MatISSetMPIXAIJPreallocation_Private(mat, MT, PETSC_FALSE));
  } else {
    PetscInt mrbs, mcbs, mrows, mcols, mlrows, mlcols;

    /* some checks */
    MT = *M;
    PetscCall(MatGetBlockSizes(MT, &mrbs, &mcbs));
    PetscCall(MatGetSize(MT, &mrows, &mcols));
    PetscCall(MatGetLocalSize(MT, &mlrows, &mlcols));
    PetscCheck(mrows == rows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", rows, mrows);
    PetscCheck(mcols == cols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", cols, mcols);
    PetscCheck(mlrows == lrows, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local rows (%" PetscInt_FMT " != %" PetscInt_FMT ")", lrows, mlrows);
    PetscCheck(mlcols == lcols, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong number of local cols (%" PetscInt_FMT " != %" PetscInt_FMT ")", lcols, mlcols);
    PetscCheck(mrbs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong row block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", rbs, mrbs);
    PetscCheck(mcbs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse matrix. Wrong col block size (%" PetscInt_FMT " != %" PetscInt_FMT ")", cbs, mcbs);
    PetscCall(MatZeroEntries(MT));
  }

  if (isseqsbaij || isseqbaij) {
    /* (S)BAIJ local matrices are converted to AIJ to use the CSR insertion loop below */
    PetscCall(MatConvert(matis->A, MATSEQAIJ, MAT_INITIAL_MATRIX, &local_mat));
    isseqaij = PETSC_TRUE;
  } else {
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    local_mat = matis->A;
  }

  /* Set values */
  PetscCall(MatSetLocalToGlobalMapping(MT, matis->rmapping, matis->cmapping));
  if (isseqdense) { /* special case for dense local matrices */
    PetscInt i, *dummy;

    /* identity index array: insert the whole dense block at once,
       column-oriented to match the dense storage */
    PetscCall(PetscMalloc1(PetscMax(local_rows, local_cols), &dummy));
    for (i = 0; i < PetscMax(local_rows, local_cols); i++) dummy[i] = i;
    PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_FALSE));
    PetscCall(MatDenseGetArrayRead(local_mat, &array));
    PetscCall(MatSetValuesLocal(MT, local_rows, dummy, local_cols, dummy, array, ADD_VALUES));
    PetscCall(MatDenseRestoreArrayRead(local_mat, &array));
    PetscCall(PetscFree(dummy));
  } else if (isseqaij) {
    const PetscInt *blocks;
    PetscInt        i, nvtxs, *xadj, *adjncy, nb;
    PetscBool       done;
    PetscScalar    *sarray;

    PetscCall(MatGetRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatGetRowIJ");
    PetscCall(MatSeqAIJGetArray(local_mat, &sarray));
    PetscCall(MatGetVariableBlockSizes(local_mat, &nb, &blocks));
    if (nb) { /* speed up assembly for special blocked matrices (used by BDDC) */
      PetscInt sum;

      for (i = 0, sum = 0; i < nb; i++) sum += blocks[i];
      if (sum == nvtxs) {
        PetscInt r;

        /* insert one dense block per variable block */
        for (i = 0, r = 0; i < nb; i++) {
          PetscAssert(blocks[i] == xadj[r + 1] - xadj[r], PETSC_COMM_SELF, PETSC_ERR_PLIB, "Invalid block sizes prescribed for block %" PetscInt_FMT ": expected %" PetscInt_FMT ", got %" PetscInt_FMT, i, blocks[i], xadj[r + 1] - xadj[r]);
          PetscCall(MatSetValuesLocal(MT, blocks[i], adjncy + xadj[r], blocks[i], adjncy + xadj[r], sarray + xadj[r], ADD_VALUES));
          r += blocks[i];
        }
      } else {
        /* block sizes inconsistent with the CSR: fall back to row-by-row insertion */
        for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], adjncy + xadj[i], sarray + xadj[i], ADD_VALUES));
      }
    } else {
      for (i = 0; i < nvtxs; i++) PetscCall(MatSetValuesLocal(MT, 1, &i, xadj[i + 1] - xadj[i], adjncy + xadj[i], sarray + xadj[i], ADD_VALUES));
    }
    PetscCall(MatRestoreRowIJ(local_mat, 0, PETSC_FALSE, PETSC_FALSE, &nvtxs, (const PetscInt **)&xadj, (const PetscInt **)&adjncy, &done));
    PetscCheck(done, PetscObjectComm((PetscObject)local_mat), PETSC_ERR_PLIB, "Error in MatRestoreRowIJ");
    PetscCall(MatSeqAIJRestoreArray(local_mat, &sarray));
  } else { /* very basic values insertion for all other matrix types */
    PetscInt i;

    for (i = 0; i < local_rows; i++) {
      PetscInt        j;
      const PetscInt *local_indices_cols;

      PetscCall(MatGetRow(local_mat, i, &j, &local_indices_cols, &array));
      PetscCall(MatSetValuesLocal(MT, 1, &i, j, local_indices_cols, array, ADD_VALUES));
      PetscCall(MatRestoreRow(local_mat, i, &j, &local_indices_cols, &array));
    }
  }
  PetscCall(MatAssemblyBegin(MT, MAT_FINAL_ASSEMBLY));
  PetscCall(MatDestroy(&local_mat));
  PetscCall(MatAssemblyEnd(MT, MAT_FINAL_ASSEMBLY));
  if (isseqdense) PetscCall(MatSetOption(MT, MAT_ROW_ORIENTED, PETSC_TRUE)); /* restore default orientation */
  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(mat, &MT));
  } else if (reuse == MAT_INITIAL_MATRIX) {
    *M = MT;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2051 
2052 /*@
2053     MatISGetMPIXAIJ - Converts `MATIS` matrix into a parallel `MATAIJ` format
2054 
2055   Input Parameters:
2056 +  mat - the matrix (should be of type `MATIS`)
2057 -  reuse - either `MAT_INITIAL_MATRIX` or `MAT_REUSE_MATRIX`
2058 
2059   Output Parameter:
2060 .  newmat - the matrix in `MATAIJ` format
2061 
2062   Level: developer
2063 
2064   Note:
    This function is deprecated and will be removed in a future release; update your code to use the `MatConvert()` interface instead.
2066 
2067 .seealso: `MATIS`, `MatConvert()`
2068 @*/
PetscErrorCode MatISGetMPIXAIJ(Mat mat, MatReuse reuse, Mat *newmat)
{
  PetscFunctionBegin;
  /* Deprecated public entry point; kept for backward compatibility.
     Validates arguments, then forwards to the conversion method composed on the object. */
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(mat, reuse, 2);
  PetscValidPointer(newmat, 3);
  if (reuse == MAT_REUSE_MATRIX) {
    /* when reusing, *newmat must be a valid Mat on the same communicator and distinct from mat */
    PetscValidHeaderSpecific(*newmat, MAT_CLASSID, 3);
    PetscCheckSameComm(mat, 1, *newmat, 3);
    PetscCheck(mat != *newmat, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot reuse the same matrix");
  }
  /* dispatch to the implementation registered as "MatISGetMPIXAIJ_C"; errors if not composed */
  PetscUseMethod(mat, "MatISGetMPIXAIJ_C", (Mat, MatType, MatReuse, Mat *), (mat, MATAIJ, reuse, newmat));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2083 
2084 static PetscErrorCode MatDuplicate_IS(Mat mat, MatDuplicateOption op, Mat *newmat)
2085 {
2086   Mat_IS  *matis = (Mat_IS *)(mat->data);
2087   PetscInt rbs, cbs, m, n, M, N;
2088   Mat      B, localmat;
2089 
2090   PetscFunctionBegin;
2091   PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping, &rbs));
2092   PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping, &cbs));
2093   PetscCall(MatGetSize(mat, &M, &N));
2094   PetscCall(MatGetLocalSize(mat, &m, &n));
2095   PetscCall(MatCreate(PetscObjectComm((PetscObject)mat), &B));
2096   PetscCall(MatSetSizes(B, m, n, M, N));
2097   PetscCall(MatSetBlockSize(B, rbs == cbs ? rbs : 1));
2098   PetscCall(MatSetType(B, MATIS));
2099   PetscCall(MatISSetLocalMatType(B, matis->lmattype));
2100   PetscCall(MatSetLocalToGlobalMapping(B, mat->rmap->mapping, mat->cmap->mapping));
2101   PetscCall(MatDuplicate(matis->A, op, &localmat));
2102   PetscCall(MatSetLocalToGlobalMapping(localmat, matis->A->rmap->mapping, matis->A->cmap->mapping));
2103   PetscCall(MatISSetLocalMat(B, localmat));
2104   PetscCall(MatDestroy(&localmat));
2105   PetscCall(MatAssemblyBegin(B, MAT_FINAL_ASSEMBLY));
2106   PetscCall(MatAssemblyEnd(B, MAT_FINAL_ASSEMBLY));
2107   *newmat = B;
2108   PetscFunctionReturn(PETSC_SUCCESS);
2109 }
2110 
2111 static PetscErrorCode MatIsHermitian_IS(Mat A, PetscReal tol, PetscBool *flg)
2112 {
2113   Mat_IS   *matis = (Mat_IS *)A->data;
2114   PetscBool local_sym;
2115 
2116   PetscFunctionBegin;
2117   PetscCall(MatIsHermitian(matis->A, tol, &local_sym));
2118   PetscCall(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2119   PetscFunctionReturn(PETSC_SUCCESS);
2120 }
2121 
2122 static PetscErrorCode MatIsSymmetric_IS(Mat A, PetscReal tol, PetscBool *flg)
2123 {
2124   Mat_IS   *matis = (Mat_IS *)A->data;
2125   PetscBool local_sym;
2126 
2127   PetscFunctionBegin;
2128   if (matis->rmapping != matis->cmapping) {
2129     *flg = PETSC_FALSE;
2130     PetscFunctionReturn(PETSC_SUCCESS);
2131   }
2132   PetscCall(MatIsSymmetric(matis->A, tol, &local_sym));
2133   PetscCall(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2134   PetscFunctionReturn(PETSC_SUCCESS);
2135 }
2136 
2137 static PetscErrorCode MatIsStructurallySymmetric_IS(Mat A, PetscBool *flg)
2138 {
2139   Mat_IS   *matis = (Mat_IS *)A->data;
2140   PetscBool local_sym;
2141 
2142   PetscFunctionBegin;
2143   if (matis->rmapping != matis->cmapping) {
2144     *flg = PETSC_FALSE;
2145     PetscFunctionReturn(PETSC_SUCCESS);
2146   }
2147   PetscCall(MatIsStructurallySymmetric(matis->A, &local_sym));
2148   PetscCall(MPIU_Allreduce(&local_sym, flg, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
2149   PetscFunctionReturn(PETSC_SUCCESS);
2150 }
2151 
static PetscErrorCode MatDestroy_IS(Mat A)
{
  Mat_IS *b = (Mat_IS *)A->data;

  PetscFunctionBegin;
  /* Release everything owned by the MATIS implementation: cached inverted block
     diagonal, local matrix type string, local matrix, scatters and work vectors */
  PetscCall(PetscFree(b->bdiag));
  PetscCall(PetscFree(b->lmattype));
  PetscCall(MatDestroy(&b->A));
  PetscCall(VecScatterDestroy(&b->cctx));
  PetscCall(VecScatterDestroy(&b->rctx));
  PetscCall(VecDestroy(&b->x));
  PetscCall(VecDestroy(&b->y));
  PetscCall(VecDestroy(&b->counter));
  PetscCall(ISDestroy(&b->getsub_ris));
  PetscCall(ISDestroy(&b->getsub_cis));
  /* csf may alias sf (shared row/column maps); destroy it only when distinct */
  if (b->sf != b->csf) {
    PetscCall(PetscSFDestroy(&b->csf));
    PetscCall(PetscFree2(b->csf_rootdata, b->csf_leafdata));
  } else b->csf = NULL;
  PetscCall(PetscSFDestroy(&b->sf));
  PetscCall(PetscFree2(b->sf_rootdata, b->sf_leafdata));
  PetscCall(ISLocalToGlobalMappingDestroy(&b->rmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&b->cmapping));
  PetscCall(MatDestroy(&b->dA));
  PetscCall(MatDestroy(&b->assembledA));
  PetscCall(PetscFree(A->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, NULL));
  /* remove every composed method so the object can be cleanly retyped */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetMPIXAIJ_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", NULL));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2200 
2201 static PetscErrorCode MatMult_IS(Mat A, Vec x, Vec y)
2202 {
2203   Mat_IS     *is   = (Mat_IS *)A->data;
2204   PetscScalar zero = 0.0;
2205 
2206   PetscFunctionBegin;
2207   /*  scatter the global vector x into the local work vector */
2208   PetscCall(VecScatterBegin(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));
2209   PetscCall(VecScatterEnd(is->cctx, x, is->x, INSERT_VALUES, SCATTER_FORWARD));
2210 
2211   /* multiply the local matrix */
2212   PetscCall(MatMult(is->A, is->x, is->y));
2213 
2214   /* scatter product back into global memory */
2215   PetscCall(VecSet(y, zero));
2216   PetscCall(VecScatterBegin(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2217   PetscCall(VecScatterEnd(is->rctx, is->y, y, ADD_VALUES, SCATTER_REVERSE));
2218   PetscFunctionReturn(PETSC_SUCCESS);
2219 }
2220 
2221 static PetscErrorCode MatMultAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2222 {
2223   Vec temp_vec;
2224 
2225   PetscFunctionBegin; /*  v3 = v2 + A * v1.*/
2226   if (v3 != v2) {
2227     PetscCall(MatMult(A, v1, v3));
2228     PetscCall(VecAXPY(v3, 1.0, v2));
2229   } else {
2230     PetscCall(VecDuplicate(v2, &temp_vec));
2231     PetscCall(MatMult(A, v1, temp_vec));
2232     PetscCall(VecAXPY(temp_vec, 1.0, v2));
2233     PetscCall(VecCopy(temp_vec, v3));
2234     PetscCall(VecDestroy(&temp_vec));
2235   }
2236   PetscFunctionReturn(PETSC_SUCCESS);
2237 }
2238 
2239 static PetscErrorCode MatMultTranspose_IS(Mat A, Vec y, Vec x)
2240 {
2241   Mat_IS *is = (Mat_IS *)A->data;
2242 
2243   PetscFunctionBegin;
2244   /*  scatter the global vector x into the local work vector */
2245   PetscCall(VecScatterBegin(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));
2246   PetscCall(VecScatterEnd(is->rctx, y, is->y, INSERT_VALUES, SCATTER_FORWARD));
2247 
2248   /* multiply the local matrix */
2249   PetscCall(MatMultTranspose(is->A, is->y, is->x));
2250 
2251   /* scatter product back into global vector */
2252   PetscCall(VecSet(x, 0));
2253   PetscCall(VecScatterBegin(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2254   PetscCall(VecScatterEnd(is->cctx, is->x, x, ADD_VALUES, SCATTER_REVERSE));
2255   PetscFunctionReturn(PETSC_SUCCESS);
2256 }
2257 
2258 static PetscErrorCode MatMultTransposeAdd_IS(Mat A, Vec v1, Vec v2, Vec v3)
2259 {
2260   Vec temp_vec;
2261 
2262   PetscFunctionBegin; /*  v3 = v2 + A' * v1.*/
2263   if (v3 != v2) {
2264     PetscCall(MatMultTranspose(A, v1, v3));
2265     PetscCall(VecAXPY(v3, 1.0, v2));
2266   } else {
2267     PetscCall(VecDuplicate(v2, &temp_vec));
2268     PetscCall(MatMultTranspose(A, v1, temp_vec));
2269     PetscCall(VecAXPY(temp_vec, 1.0, v2));
2270     PetscCall(VecCopy(temp_vec, v3));
2271     PetscCall(VecDestroy(&temp_vec));
2272   }
2273   PetscFunctionReturn(PETSC_SUCCESS);
2274 }
2275 
2276 static PetscErrorCode MatView_IS(Mat A, PetscViewer viewer)
2277 {
2278   Mat_IS     *a = (Mat_IS *)A->data;
2279   PetscViewer sviewer;
2280   PetscBool   isascii, view = PETSC_TRUE;
2281 
2282   PetscFunctionBegin;
2283   PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2284   if (isascii) {
2285     PetscViewerFormat format;
2286 
2287     PetscCall(PetscViewerGetFormat(viewer, &format));
2288     if (format == PETSC_VIEWER_ASCII_INFO) view = PETSC_FALSE;
2289   }
2290   if (!view) PetscFunctionReturn(PETSC_SUCCESS);
2291   PetscCall(PetscViewerGetSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2292   PetscCall(MatView(a->A, sviewer));
2293   PetscCall(PetscViewerRestoreSubViewer(viewer, PETSC_COMM_SELF, &sviewer));
2294   PetscCall(PetscViewerFlush(viewer));
2295   PetscFunctionReturn(PETSC_SUCCESS);
2296 }
2297 
static PetscErrorCode MatInvertBlockDiagonal_IS(Mat mat, const PetscScalar **values)
{
  Mat_IS            *is = (Mat_IS *)mat->data;
  MPI_Datatype       nodeType;
  const PetscScalar *lv;
  PetscInt           bs;

  PetscFunctionBegin;
  /* Invert the block diagonal of the local matrix, then move the result onto the
     owning processes through the SF, bs scalars at a time via a contiguous MPI type. */
  PetscCall(MatGetBlockSize(mat, &bs));
  PetscCall(MatSetBlockSize(is->A, bs));
  PetscCall(MatInvertBlockDiagonal(is->A, &lv));
  /* lazily allocate the cached result; freed in MatDestroy_IS() */
  if (!is->bdiag) PetscCall(PetscMalloc1(bs * mat->rmap->n, &is->bdiag));
  PetscCallMPI(MPI_Type_contiguous(bs, MPIU_SCALAR, &nodeType));
  PetscCallMPI(MPI_Type_commit(&nodeType));
  /* MPI_REPLACE: when several subdomains share a row, one contribution overwrites the
     others — NOTE(review): presumably the shared values agree across subdomains; confirm */
  PetscCall(PetscSFReduceBegin(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
  PetscCall(PetscSFReduceEnd(is->sf, nodeType, lv, is->bdiag, MPI_REPLACE));
  PetscCallMPI(MPI_Type_free(&nodeType));
  if (values) *values = is->bdiag;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2318 
/* (Re)builds the global<->local scatters (rctx/cctx), the local work vectors (x/y)
   and the interface multiplicity counter used by the MATIS operations.
   Called whenever the local-to-global maps of A change. */
static PetscErrorCode MatISSetUpScatters_Private(Mat A)
{
  Vec             cglobal, rglobal;
  IS              from;
  Mat_IS         *is = (Mat_IS *)A->data;
  PetscScalar     sum;
  const PetscInt *garray;
  PetscInt        nr, rbs, nc, cbs;
  VecType         rtype;

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
  /* discard any previously created vectors and scatters */
  PetscCall(VecDestroy(&is->x));
  PetscCall(VecDestroy(&is->y));
  PetscCall(VecDestroy(&is->counter));
  PetscCall(VecScatterDestroy(&is->rctx));
  PetscCall(VecScatterDestroy(&is->cctx));
  PetscCall(MatCreateVecs(is->A, &is->x, &is->y));
  PetscCall(VecBindToCPU(is->y, PETSC_TRUE));
  /* make the parallel matrix hand out vectors of the same root type as the local ones */
  PetscCall(VecGetRootType_Private(is->y, &rtype));
  PetscCall(PetscFree(A->defaultvectype));
  PetscCall(PetscStrallocpy(rtype, &A->defaultvectype));
  PetscCall(MatCreateVecs(A, &cglobal, &rglobal));
  PetscCall(VecBindToCPU(rglobal, PETSC_TRUE));
  /* row scatter: global entries (blocked indices from the row l2g map) -> local is->y */
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &garray));
  PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), rbs, nr / rbs, garray, PETSC_USE_POINTER, &from));
  PetscCall(VecScatterCreate(rglobal, from, is->y, NULL, &is->rctx));
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &garray));
  PetscCall(ISDestroy(&from));
  if (is->rmapping != is->cmapping) {
    /* distinct column map: build a separate column scatter into is->x */
    PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &garray));
    PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), cbs, nc / cbs, garray, PETSC_USE_POINTER, &from));
    PetscCall(VecScatterCreate(cglobal, from, is->x, NULL, &is->cctx));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &garray));
    PetscCall(ISDestroy(&from));
  } else {
    /* same map: share the row scatter (reference counted) */
    PetscCall(PetscObjectReference((PetscObject)is->rctx));
    is->cctx = is->rctx;
  }
  PetscCall(VecDestroy(&cglobal));

  /* interface counter vector (local) */
  /* counter[i] = number of subdomains sharing local dof i: scatter ones to the global
     vector with ADD, then scatter the sums back */
  PetscCall(VecDuplicate(is->y, &is->counter));
  PetscCall(VecBindToCPU(is->counter, PETSC_TRUE));
  PetscCall(VecSet(is->y, 1.));
  PetscCall(VecScatterBegin(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(is->rctx, is->y, rglobal, ADD_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterBegin(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(is->rctx, rglobal, is->counter, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecBindToCPU(is->y, PETSC_FALSE));
  PetscCall(VecBindToCPU(is->counter, PETSC_FALSE));

  /* special functions for block-diagonal matrices */
  /* if the multiplicities sum to N, every dof belongs to exactly one subdomain, so the
     operator is block diagonal over subdomains and supports MatInvertBlockDiagonal */
  PetscCall(VecSum(rglobal, &sum));
  A->ops->invertblockdiagonal = NULL;
  if ((PetscInt)(PetscRealPart(sum)) == A->rmap->N && A->rmap->N == A->cmap->N && is->rmapping == is->cmapping) A->ops->invertblockdiagonal = MatInvertBlockDiagonal_IS;
  PetscCall(VecDestroy(&rglobal));

  /* setup SF for general purpose shared indices based communications */
  PetscCall(MatISSetUpSF_IS(A));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2384 
/* Filters a user-supplied l2g map. On output *nmap is a new map whose block indices
   are non-negative and unique (NULL if the input was already clean, in which case
   *lmap is NULL too), and *lmap maps the original local blocks into *nmap's local
   numbering so repeated/negative entries still resolve consistently. */
static PetscErrorCode MatISFilterL2GMap(Mat A, ISLocalToGlobalMapping map, ISLocalToGlobalMapping *nmap, ISLocalToGlobalMapping *lmap)
{
  IS                         is;
  ISLocalToGlobalMappingType l2gtype;
  const PetscInt            *idxs;
  PetscHSetI                 ht;
  PetscInt                  *nidxs;
  PetscInt                   i, n, bs, c;
  PetscBool                  flg[] = {PETSC_FALSE, PETSC_FALSE};

  PetscFunctionBegin;
  PetscCall(ISLocalToGlobalMappingGetSize(map, &n));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(map, &bs));
  PetscCall(ISLocalToGlobalMappingGetBlockIndices(map, &idxs));
  PetscCall(PetscHSetICreate(&ht));
  PetscCall(PetscMalloc1(n / bs, &nidxs));
  /* scan the n/bs block indices; flg[0] records a negative entry, flg[1] a duplicate */
  for (i = 0, c = 0; i < n / bs; i++) {
    PetscBool missing;
    if (idxs[i] < 0) {
      flg[0] = PETSC_TRUE;
      continue;
    }
    /* missing is TRUE when the index was not yet in the set (first occurrence) */
    PetscCall(PetscHSetIQueryAdd(ht, idxs[i], &missing));
    if (!missing) flg[1] = PETSC_TRUE;
    else nidxs[c++] = idxs[i];
  }
  PetscCall(PetscHSetIDestroy(&ht));
  /* all processes must agree on whether filtering is needed */
  PetscCall(MPIU_Allreduce(MPI_IN_PLACE, flg, 2, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
  if (!flg[0] && !flg[1]) { /* Entries are all non negative and unique */
    *nmap = NULL;
    *lmap = NULL;
    PetscCall(PetscFree(nidxs));
    PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
    PetscFunctionReturn(PETSC_SUCCESS);
  }

  /* New l2g map without negative or repeated indices */
  PetscCall(ISCreateBlock(PetscObjectComm((PetscObject)A), bs, c, nidxs, PETSC_USE_POINTER, &is));
  PetscCall(ISLocalToGlobalMappingCreateIS(is, nmap));
  PetscCall(ISDestroy(&is));
  /* preserve the mapping type (e.g. hash-based) of the original map */
  PetscCall(ISLocalToGlobalMappingGetType(map, &l2gtype));
  PetscCall(ISLocalToGlobalMappingSetType(*nmap, l2gtype));

  /* New local l2g map for repeated indices */
  /* nidxs is reused here: it receives the local position in *nmap of each original block */
  PetscCall(ISGlobalToLocalMappingApplyBlock(*nmap, IS_GTOLM_MASK, n / bs, idxs, NULL, nidxs));
  PetscCall(ISCreateBlock(PETSC_COMM_SELF, bs, n / bs, nidxs, PETSC_USE_POINTER, &is));
  PetscCall(ISLocalToGlobalMappingCreateIS(is, lmap));
  PetscCall(ISDestroy(&is));

  PetscCall(PetscFree(nidxs));
  PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(map, &idxs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2438 
/* Installs the row/column local-to-global maps on a MATIS, filters them for negative
   or repeated entries, (re)creates the local matrix is->A with matching sizes, and
   rebuilds the scatters. Passing NULL maps means "local space == global space". */
static PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping rmapping, ISLocalToGlobalMapping cmapping)
{
  Mat_IS                *is            = (Mat_IS *)A->data;
  ISLocalToGlobalMapping localrmapping = NULL, localcmapping = NULL;
  PetscBool              cong, freem[]                       = {PETSC_FALSE, PETSC_FALSE};
  PetscInt               nr, rbs, nc, cbs;

  PetscFunctionBegin;
  if (rmapping) PetscCheckSameComm(A, 1, rmapping, 2);
  if (cmapping) PetscCheckSameComm(A, 1, cmapping, 3);

  /* drop any previously stored (filtered) maps */
  PetscCall(ISLocalToGlobalMappingDestroy(&is->rmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  PetscCall(MatHasCongruentLayouts(A, &cong));

  /* If NULL, local space matches global space */
  if (!rmapping) {
    IS is;

    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->rmap->N, 0, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &rmapping));
    if (A->rmap->bs > 0) PetscCall(ISLocalToGlobalMappingSetBlockSize(rmapping, A->rmap->bs));
    PetscCall(ISDestroy(&is));
    freem[0] = PETSC_TRUE; /* we own this identity map; destroyed at the end */
    if (!cmapping && cong && A->rmap->bs == A->cmap->bs) cmapping = rmapping;
  } else if (!is->islocalref) { /* check if the l2g map has negative or repeated entries */
    PetscCall(MatISFilterL2GMap(A, rmapping, &is->rmapping, &localrmapping));
    if (rmapping == cmapping) {
      /* same user map for rows and columns: share the filtered results */
      PetscCall(PetscObjectReference((PetscObject)is->rmapping));
      is->cmapping = is->rmapping;
      PetscCall(PetscObjectReference((PetscObject)localrmapping));
      localcmapping = localrmapping;
    }
  }
  if (!cmapping) {
    IS is;

    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)A), A->cmap->N, 0, 1, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cmapping));
    if (A->cmap->bs > 0) PetscCall(ISLocalToGlobalMappingSetBlockSize(cmapping, A->cmap->bs));
    PetscCall(ISDestroy(&is));
    freem[1] = PETSC_TRUE;
  } else if (cmapping != rmapping && !is->islocalref) { /* check if the l2g map has negative or repeated entries */
    PetscCall(MatISFilterL2GMap(A, cmapping, &is->cmapping, &localcmapping));
  }
  /* when filtering was not needed, keep references to the user-provided maps */
  if (!is->rmapping) {
    PetscCall(PetscObjectReference((PetscObject)rmapping));
    is->rmapping = rmapping;
  }
  if (!is->cmapping) {
    PetscCall(PetscObjectReference((PetscObject)cmapping));
    is->cmapping = cmapping;
  }

  /* Clean up */
  /* invalidate the old local matrix, SFs and cached block diagonal */
  PetscCall(MatDestroy(&is->A));
  if (is->csf != is->sf) {
    PetscCall(PetscSFDestroy(&is->csf));
    PetscCall(PetscFree2(is->csf_rootdata, is->csf_leafdata));
  } else is->csf = NULL;
  PetscCall(PetscSFDestroy(&is->sf));
  PetscCall(PetscFree2(is->sf_rootdata, is->sf_leafdata));
  PetscCall(PetscFree(is->bdiag));

  /* check if the two mappings are actually the same for square matrices since MATIS has some optimization for this case
     (DOLFIN passes 2 different objects) */
  PetscCall(ISLocalToGlobalMappingGetSize(is->rmapping, &nr));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->rmapping, &rbs));
  PetscCall(ISLocalToGlobalMappingGetSize(is->cmapping, &nc));
  PetscCall(ISLocalToGlobalMappingGetBlockSize(is->cmapping, &cbs));
  if (is->rmapping != is->cmapping && cong) {
    PetscBool same = PETSC_FALSE;
    if (nr == nc && cbs == rbs) {
      const PetscInt *idxs1, *idxs2;

      PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->rmapping, &idxs1));
      PetscCall(ISLocalToGlobalMappingGetBlockIndices(is->cmapping, &idxs2));
      PetscCall(PetscArraycmp(idxs1, idxs2, nr / rbs, &same));
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->rmapping, &idxs1));
      PetscCall(ISLocalToGlobalMappingRestoreBlockIndices(is->cmapping, &idxs2));
    }
    /* all processes must agree before aliasing the column map to the row map */
    PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &same, 1, MPIU_BOOL, MPI_LAND, PetscObjectComm((PetscObject)A)));
    if (same) {
      PetscCall(ISLocalToGlobalMappingDestroy(&is->cmapping));
      PetscCall(PetscObjectReference((PetscObject)is->rmapping));
      is->cmapping = is->rmapping;
    }
  }
  PetscCall(PetscLayoutSetBlockSize(A->rmap, rbs));
  PetscCall(PetscLayoutSetBlockSize(A->cmap, cbs));
  /* Pass the user defined maps to the layout */
  PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->rmap, rmapping));
  PetscCall(PetscLayoutSetISLocalToGlobalMapping(A->cmap, cmapping));
  if (freem[0]) PetscCall(ISLocalToGlobalMappingDestroy(&rmapping));
  if (freem[1]) PetscCall(ISLocalToGlobalMappingDestroy(&cmapping));

  /* Create the local matrix A */
  PetscCall(MatCreate(PETSC_COMM_SELF, &is->A));
  PetscCall(MatSetType(is->A, is->lmattype));
  PetscCall(MatSetSizes(is->A, nr, nc, nr, nc));
  PetscCall(MatSetBlockSizes(is->A, rbs, cbs));
  PetscCall(MatSetOptionsPrefix(is->A, "is_"));
  PetscCall(MatAppendOptionsPrefix(is->A, ((PetscObject)A)->prefix));
  PetscCall(PetscLayoutSetUp(is->A->rmap));
  PetscCall(PetscLayoutSetUp(is->A->cmap));
  /* local maps produced by the filtering step (may be NULL) */
  PetscCall(MatSetLocalToGlobalMapping(is->A, localrmapping, localcmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&localrmapping));
  PetscCall(ISLocalToGlobalMappingDestroy(&localcmapping));

  /* setup scatters and local vectors for MatMult */
  if (!is->islocalref) PetscCall(MatISSetUpScatters_Private(A));
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2555 
2556 static PetscErrorCode MatSetUp_IS(Mat A)
2557 {
2558   ISLocalToGlobalMapping rmap, cmap;
2559 
2560   PetscFunctionBegin;
2561   PetscCall(MatGetLocalToGlobalMapping(A, &rmap, &cmap));
2562   if (!rmap && !cmap) PetscCall(MatSetLocalToGlobalMapping(A, NULL, NULL));
2563   PetscFunctionReturn(PETSC_SUCCESS);
2564 }
2565 
2566 static PetscErrorCode MatSetValues_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2567 {
2568   Mat_IS  *is = (Mat_IS *)mat->data;
2569   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];
2570 
2571   PetscFunctionBegin;
2572   PetscCall(ISGlobalToLocalMappingApply(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2573   if (m != n || rows != cols || is->cmapping != is->rmapping) {
2574     PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2575     PetscCall(MatSetValues(is->A, m, rows_l, n, cols_l, values, addv));
2576   } else {
2577     PetscCall(MatSetValues(is->A, m, rows_l, m, rows_l, values, addv));
2578   }
2579   PetscFunctionReturn(PETSC_SUCCESS);
2580 }
2581 
2582 static PetscErrorCode MatSetValuesBlocked_IS(Mat mat, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2583 {
2584   Mat_IS  *is = (Mat_IS *)mat->data;
2585   PetscInt rows_l[MATIS_MAX_ENTRIES_INSERTION], cols_l[MATIS_MAX_ENTRIES_INSERTION];
2586 
2587   PetscFunctionBegin;
2588   PetscCall(ISGlobalToLocalMappingApplyBlock(is->rmapping, IS_GTOLM_MASK, m, rows, &m, rows_l));
2589   if (m != n || rows != cols || is->cmapping != is->rmapping) {
2590     PetscCall(ISGlobalToLocalMappingApply(is->cmapping, IS_GTOLM_MASK, n, cols, &n, cols_l));
2591     PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, cols_l, values, addv));
2592   } else {
2593     PetscCall(MatSetValuesBlocked(is->A, m, rows_l, n, rows_l, values, addv));
2594   }
2595   PetscFunctionReturn(PETSC_SUCCESS);
2596 }
2597 
2598 static PetscErrorCode MatSetValuesLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2599 {
2600   Mat_IS *is = (Mat_IS *)A->data;
2601 
2602   PetscFunctionBegin;
2603   if (is->A->rmap->mapping || is->A->cmap->mapping) {
2604     PetscCall(MatSetValuesLocal(is->A, m, rows, n, cols, values, addv));
2605   } else {
2606     PetscCall(MatSetValues(is->A, m, rows, n, cols, values, addv));
2607   }
2608   PetscFunctionReturn(PETSC_SUCCESS);
2609 }
2610 
2611 static PetscErrorCode MatSetValuesBlockedLocal_IS(Mat A, PetscInt m, const PetscInt *rows, PetscInt n, const PetscInt *cols, const PetscScalar *values, InsertMode addv)
2612 {
2613   Mat_IS *is = (Mat_IS *)A->data;
2614 
2615   PetscFunctionBegin;
2616   if (is->A->rmap->mapping || is->A->cmap->mapping) {
2617     PetscCall(MatSetValuesBlockedLocal(is->A, m, rows, n, cols, values, addv));
2618   } else {
2619     PetscCall(MatSetValuesBlocked(is->A, m, rows, n, cols, values, addv));
2620   }
2621   PetscFunctionReturn(PETSC_SUCCESS);
2622 }
2623 
/* Zeroes the given subdomain-local rows (and columns if requested) of the local
   matrix, placing `diag` on the assembled global diagonal. `rows` are local indices. */
static PetscErrorCode MatISZeroRowsColumnsLocal_Private(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, PetscBool columns)
{
  Mat_IS *is = (Mat_IS *)A->data;

  PetscFunctionBegin;
  if (!n) {
    /* nothing zeroed on this subdomain: flag the local problem as pure Neumann */
    is->pure_neumann = PETSC_TRUE;
  } else {
    PetscInt i;
    is->pure_neumann = PETSC_FALSE;

    if (columns) {
      PetscCall(MatZeroRowsColumns(is->A, n, rows, diag, NULL, NULL));
    } else {
      PetscCall(MatZeroRows(is->A, n, rows, diag, NULL, NULL));
    }
    if (diag != 0.) {
      /* every subdomain sharing a dof contributes to the assembled diagonal, so divide
         by the multiplicity stored in is->counter to obtain `diag` globally */
      const PetscScalar *array;
      PetscCall(VecGetArrayRead(is->counter, &array));
      for (i = 0; i < n; i++) PetscCall(MatSetValue(is->A, rows[i], rows[i], diag / (array[rows[i]]), INSERT_VALUES));
      PetscCall(VecRestoreArrayRead(is->counter, &array));
    }
    PetscCall(MatAssemblyBegin(is->A, MAT_FINAL_ASSEMBLY));
    PetscCall(MatAssemblyEnd(is->A, MAT_FINAL_ASSEMBLY));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2651 
/* Common implementation for MatZeroRows_IS()/MatZeroRowsColumns_IS(): translates the
   global rows into subdomain-local rows via the SF and delegates the local work. */
static PetscErrorCode MatZeroRowsColumns_Private_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b, PetscBool columns)
{
  Mat_IS   *matis = (Mat_IS *)A->data;
  PetscInt  nr, nl, len, i;
  PetscInt *lrows;

  PetscFunctionBegin;
  /* debug-only sanity checks: these features need congruent layouts and shared SFs */
  if (PetscUnlikelyDebug(columns || diag != 0. || (x && b))) {
    PetscBool cong;

    PetscCall(PetscLayoutCompare(A->rmap, A->cmap, &cong));
    cong = (PetscBool)(cong && matis->sf == matis->csf);
    PetscCheck(cong || !columns, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Columns can be zeroed if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
    PetscCheck(cong || diag == 0., PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "Nonzero diagonal value supported if and only if A->rmap and A->cmap are congruent and the l2g maps are the same for MATIS");
    PetscCheck(cong || !x || !b, PetscObjectComm((PetscObject)A), PETSC_ERR_SUP, "A->rmap and A->cmap need to be congruent, and the l2g maps be the same");
  }
  /* get locally owned rows */
  PetscCall(PetscLayoutMapLocal(A->rmap, n, rows, &len, &lrows, NULL));
  /* fix right hand side if needed */
  if (x && b) {
    const PetscScalar *xx;
    PetscScalar       *bb;

    PetscCall(VecGetArrayRead(x, &xx));
    PetscCall(VecGetArray(b, &bb));
    /* b_i = diag * x_i on the zeroed rows */
    for (i = 0; i < len; ++i) bb[lrows[i]] = diag * xx[lrows[i]];
    PetscCall(VecRestoreArrayRead(x, &xx));
    PetscCall(VecRestoreArray(b, &bb));
  }
  /* get rows associated to the local matrices */
  /* mark owned rows in sf_rootdata and broadcast to all subdomains sharing them */
  PetscCall(MatGetSize(matis->A, &nl, NULL));
  PetscCall(PetscArrayzero(matis->sf_leafdata, nl));
  PetscCall(PetscArrayzero(matis->sf_rootdata, A->rmap->n));
  for (i = 0; i < len; i++) matis->sf_rootdata[lrows[i]] = 1;
  PetscCall(PetscFree(lrows));
  PetscCall(PetscSFBcastBegin(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(matis->sf, MPIU_INT, matis->sf_rootdata, matis->sf_leafdata, MPI_REPLACE));
  /* collect the marked subdomain-local rows */
  PetscCall(PetscMalloc1(nl, &lrows));
  for (i = 0, nr = 0; i < nl; i++)
    if (matis->sf_leafdata[i]) lrows[nr++] = i;
  PetscCall(MatISZeroRowsColumnsLocal_Private(A, nr, lrows, diag, columns));
  PetscCall(PetscFree(lrows));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2698 
/* MatZeroRows implementation for MATIS: forwards to the shared helper with columns = FALSE */
static PetscErrorCode MatZeroRows_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_FALSE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2705 
/* MatZeroRowsColumns implementation for MATIS: forwards to the shared helper with columns = TRUE */
static PetscErrorCode MatZeroRowsColumns_IS(Mat A, PetscInt n, const PetscInt rows[], PetscScalar diag, Vec x, Vec b)
{
  PetscFunctionBegin;
  PetscCall(MatZeroRowsColumns_Private_IS(A, n, rows, diag, x, b, PETSC_TRUE));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2712 
static PetscErrorCode MatAssemblyBegin_IS(Mat A, MatAssemblyType type)
{
  Mat_IS *is = (Mat_IS *)A->data;

  PetscFunctionBegin;
  /* assembling a MATIS just assembles its local (subdomain) matrix */
  PetscCall(MatAssemblyBegin(is->A, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2721 
2722 static PetscErrorCode MatAssemblyEnd_IS(Mat A, MatAssemblyType type)
2723 {
2724   Mat_IS   *is = (Mat_IS *)A->data;
2725   PetscBool lnnz;
2726 
2727   PetscFunctionBegin;
2728   PetscCall(MatAssemblyEnd(is->A, type));
2729   /* fix for local empty rows/cols */
2730   if (is->locempty && type == MAT_FINAL_ASSEMBLY) {
2731     Mat                    newlA;
2732     ISLocalToGlobalMapping rl2g, cl2g;
2733     IS                     nzr, nzc;
2734     PetscInt               nr, nc, nnzr, nnzc;
2735     PetscBool              lnewl2g, newl2g;
2736 
2737     PetscCall(MatGetSize(is->A, &nr, &nc));
2738     PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_FALSE, PETSC_SMALL, &nzr));
2739     if (!nzr) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nr, 0, 1, &nzr));
2740     PetscCall(MatFindNonzeroRowsOrCols_Basic(is->A, PETSC_TRUE, PETSC_SMALL, &nzc));
2741     if (!nzc) PetscCall(ISCreateStride(PetscObjectComm((PetscObject)is->A), nc, 0, 1, &nzc));
2742     PetscCall(ISGetSize(nzr, &nnzr));
2743     PetscCall(ISGetSize(nzc, &nnzc));
2744     if (nnzr != nr || nnzc != nc) { /* need new global l2g map */
2745       lnewl2g = PETSC_TRUE;
2746       PetscCallMPI(MPI_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2747 
2748       /* extract valid submatrix */
2749       PetscCall(MatCreateSubMatrix(is->A, nzr, nzc, MAT_INITIAL_MATRIX, &newlA));
2750     } else { /* local matrix fully populated */
2751       lnewl2g = PETSC_FALSE;
2752       PetscCallMPI(MPI_Allreduce(&lnewl2g, &newl2g, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2753       PetscCall(PetscObjectReference((PetscObject)is->A));
2754       newlA = is->A;
2755     }
2756 
2757     /* attach new global l2g map if needed */
2758     if (newl2g) {
2759       IS              zr, zc;
2760       const PetscInt *ridxs, *cidxs, *zridxs, *zcidxs;
2761       PetscInt       *nidxs, i;
2762 
2763       PetscCall(ISComplement(nzr, 0, nr, &zr));
2764       PetscCall(ISComplement(nzc, 0, nc, &zc));
2765       PetscCall(PetscMalloc1(PetscMax(nr, nc), &nidxs));
2766       PetscCall(ISLocalToGlobalMappingGetIndices(is->rmapping, &ridxs));
2767       PetscCall(ISLocalToGlobalMappingGetIndices(is->cmapping, &cidxs));
2768       PetscCall(ISGetIndices(zr, &zridxs));
2769       PetscCall(ISGetIndices(zc, &zcidxs));
2770       PetscCall(ISGetLocalSize(zr, &nnzr));
2771       PetscCall(ISGetLocalSize(zc, &nnzc));
2772 
2773       PetscCall(PetscArraycpy(nidxs, ridxs, nr));
2774       for (i = 0; i < nnzr; i++) nidxs[zridxs[i]] = -1;
2775       PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nr, nidxs, PETSC_COPY_VALUES, &rl2g));
2776       PetscCall(PetscArraycpy(nidxs, cidxs, nc));
2777       for (i = 0; i < nnzc; i++) nidxs[zcidxs[i]] = -1;
2778       PetscCall(ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)A), 1, nc, nidxs, PETSC_COPY_VALUES, &cl2g));
2779 
2780       PetscCall(ISRestoreIndices(zr, &zridxs));
2781       PetscCall(ISRestoreIndices(zc, &zcidxs));
2782       PetscCall(ISLocalToGlobalMappingRestoreIndices(is->rmapping, &ridxs));
2783       PetscCall(ISLocalToGlobalMappingRestoreIndices(is->cmapping, &cidxs));
2784       PetscCall(ISDestroy(&nzr));
2785       PetscCall(ISDestroy(&nzc));
2786       PetscCall(ISDestroy(&zr));
2787       PetscCall(ISDestroy(&zc));
2788       PetscCall(PetscFree(nidxs));
2789       PetscCall(MatSetLocalToGlobalMapping(A, rl2g, cl2g));
2790       PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
2791       PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));
2792     }
2793     PetscCall(MatISSetLocalMat(A, newlA));
2794     PetscCall(MatDestroy(&newlA));
2795     PetscCall(ISDestroy(&nzr));
2796     PetscCall(ISDestroy(&nzc));
2797     is->locempty = PETSC_FALSE;
2798   }
2799   lnnz          = (PetscBool)(is->A->nonzerostate == is->lnnzstate);
2800   is->lnnzstate = is->A->nonzerostate;
2801   PetscCall(MPIU_Allreduce(MPI_IN_PLACE, &lnnz, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)A)));
2802   if (lnnz) A->nonzerostate++;
2803   PetscFunctionReturn(PETSC_SUCCESS);
2804 }
2805 
2806 static PetscErrorCode MatISGetLocalMat_IS(Mat mat, Mat *local)
2807 {
2808   Mat_IS *is = (Mat_IS *)mat->data;
2809 
2810   PetscFunctionBegin;
2811   *local = is->A;
2812   PetscFunctionReturn(PETSC_SUCCESS);
2813 }
2814 
2815 static PetscErrorCode MatISRestoreLocalMat_IS(Mat mat, Mat *local)
2816 {
2817   PetscFunctionBegin;
2818   *local = NULL;
2819   PetscFunctionReturn(PETSC_SUCCESS);
2820 }
2821 
/*@
    MatISGetLocalMat - Gets the local matrix stored inside a `MATIS` matrix.

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Notes:
    This can be called if you have precomputed the nonzero structure of the
  matrix and want to provide it to the inner matrix object to improve the performance
  of the `MatSetValues()` operation.

  The returned matrix is a borrowed reference; do not destroy it.

  Call `MatISRestoreLocalMat()` when finished with the local matrix.

.seealso: `MATIS`, `MatISRestoreLocalMat()`
@*/
PetscErrorCode MatISGetLocalMat(Mat mat, Mat *local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidPointer(local, 2);
  /* dispatch to the type-specific implementation (MatISGetLocalMat_IS for MATIS) */
  PetscUseMethod(mat, "MatISGetLocalMat_C", (Mat, Mat *), (mat, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2850 
/*@
    MatISRestoreLocalMat - Restores the local matrix obtained with `MatISGetLocalMat()`

  Input Parameter:
.  mat - the matrix

  Output Parameter:
.  local - the local matrix

  Level: advanced

  Note:
  The pointer is set to `NULL`; the local matrix itself is not destroyed.

.seealso: `MATIS`, `MatISGetLocalMat()`
@*/
PetscErrorCode MatISRestoreLocalMat(Mat mat, Mat *local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidPointer(local, 2);
  /* dispatch to the type-specific implementation (MatISRestoreLocalMat_IS for MATIS) */
  PetscUseMethod(mat, "MatISRestoreLocalMat_C", (Mat, Mat *), (mat, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2872 
2873 static PetscErrorCode MatISSetLocalMatType_IS(Mat mat, MatType mtype)
2874 {
2875   Mat_IS *is = (Mat_IS *)mat->data;
2876 
2877   PetscFunctionBegin;
2878   if (is->A) PetscCall(MatSetType(is->A, mtype));
2879   PetscCall(PetscFree(is->lmattype));
2880   PetscCall(PetscStrallocpy(mtype, &is->lmattype));
2881   PetscFunctionReturn(PETSC_SUCCESS);
2882 }
2883 
/*@
    MatISSetLocalMatType - Specifies the type of local matrix inside the `MATIS`

  Input Parameters:
+  mat - the matrix
-  mtype - the local matrix type

  Level: advanced

  Note:
  If a local matrix is already present, its type is changed with `MatSetType()`;
  the type is also remembered for local matrices created later.

.seealso: `MATIS`, `MatSetType()`, `MatType`
@*/
PetscErrorCode MatISSetLocalMatType(Mat mat, MatType mtype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  /* dispatch to the type-specific implementation (MatISSetLocalMatType_IS for MATIS) */
  PetscUseMethod(mat, "MatISSetLocalMatType_C", (Mat, MatType), (mat, mtype));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2902 
2903 static PetscErrorCode MatISSetLocalMat_IS(Mat mat, Mat local)
2904 {
2905   Mat_IS   *is = (Mat_IS *)mat->data;
2906   PetscInt  nrows, ncols, orows, ocols;
2907   MatType   mtype, otype;
2908   PetscBool sametype = PETSC_TRUE;
2909 
2910   PetscFunctionBegin;
2911   if (is->A && !is->islocalref) {
2912     PetscCall(MatGetSize(is->A, &orows, &ocols));
2913     PetscCall(MatGetSize(local, &nrows, &ncols));
2914     PetscCheck(orows == nrows && ocols == ncols, PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Local MATIS matrix should be of size %" PetscInt_FMT "x%" PetscInt_FMT " (you passed a %" PetscInt_FMT "x%" PetscInt_FMT " matrix)", orows, ocols, nrows, ncols);
2915     PetscCall(MatGetType(local, &mtype));
2916     PetscCall(MatGetType(is->A, &otype));
2917     PetscCall(PetscStrcmp(mtype, otype, &sametype));
2918   }
2919   PetscCall(PetscObjectReference((PetscObject)local));
2920   PetscCall(MatDestroy(&is->A));
2921   is->A = local;
2922   PetscCall(MatGetType(is->A, &mtype));
2923   PetscCall(MatISSetLocalMatType(mat, mtype));
2924   if (!sametype && !is->islocalref) PetscCall(MatISSetUpScatters_Private(mat));
2925   PetscFunctionReturn(PETSC_SUCCESS);
2926 }
2927 
/*@
    MatISSetLocalMat - Replace the local matrix stored inside a `MATIS` object.

  Collective

  Input Parameters:
+  mat - the matrix
-  local - the local matrix

  Level: advanced

  Notes:
  Any previous matrix within the `MATIS` has its reference count decreased by one.

  This can be called if you have precomputed the local matrix and
  want to provide it to the matrix object `MATIS`.

.seealso: `MATIS`, `MatISSetLocalMatType`, `MatISGetLocalMat()`
@*/
PetscErrorCode MatISSetLocalMat(Mat mat, Mat local)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  PetscValidHeaderSpecific(local, MAT_CLASSID, 2);
  /* dispatch to the type-specific implementation (MatISSetLocalMat_IS for MATIS) */
  PetscUseMethod(mat, "MatISSetLocalMat_C", (Mat, Mat), (mat, local));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2955 
2956 static PetscErrorCode MatZeroEntries_IS(Mat A)
2957 {
2958   Mat_IS *a = (Mat_IS *)A->data;
2959 
2960   PetscFunctionBegin;
2961   PetscCall(MatZeroEntries(a->A));
2962   PetscFunctionReturn(PETSC_SUCCESS);
2963 }
2964 
2965 static PetscErrorCode MatScale_IS(Mat A, PetscScalar a)
2966 {
2967   Mat_IS *is = (Mat_IS *)A->data;
2968 
2969   PetscFunctionBegin;
2970   PetscCall(MatScale(is->A, a));
2971   PetscFunctionReturn(PETSC_SUCCESS);
2972 }
2973 
2974 static PetscErrorCode MatGetDiagonal_IS(Mat A, Vec v)
2975 {
2976   Mat_IS *is = (Mat_IS *)A->data;
2977 
2978   PetscFunctionBegin;
2979   /* get diagonal of the local matrix */
2980   PetscCall(MatGetDiagonal(is->A, is->y));
2981 
2982   /* scatter diagonal back into global vector */
2983   PetscCall(VecSet(v, 0));
2984   PetscCall(VecScatterBegin(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
2985   PetscCall(VecScatterEnd(is->rctx, is->y, v, ADD_VALUES, SCATTER_REVERSE));
2986   PetscFunctionReturn(PETSC_SUCCESS);
2987 }
2988 
2989 static PetscErrorCode MatSetOption_IS(Mat A, MatOption op, PetscBool flg)
2990 {
2991   Mat_IS *a = (Mat_IS *)A->data;
2992 
2993   PetscFunctionBegin;
2994   PetscCall(MatSetOption(a->A, op, flg));
2995   PetscFunctionReturn(PETSC_SUCCESS);
2996 }
2997 
2998 static PetscErrorCode MatAXPY_IS(Mat Y, PetscScalar a, Mat X, MatStructure str)
2999 {
3000   Mat_IS *y = (Mat_IS *)Y->data;
3001   Mat_IS *x;
3002 
3003   PetscFunctionBegin;
3004   if (PetscDefined(USE_DEBUG)) {
3005     PetscBool ismatis;
3006     PetscCall(PetscObjectTypeCompare((PetscObject)X, MATIS, &ismatis));
3007     PetscCheck(ismatis, PetscObjectComm((PetscObject)Y), PETSC_ERR_SUP, "Cannot call MatAXPY(Y,a,X,str) with X not of type MATIS");
3008   }
3009   x = (Mat_IS *)X->data;
3010   PetscCall(MatAXPY(y->A, a, x->A, str));
3011   PetscFunctionReturn(PETSC_SUCCESS);
3012 }
3013 
/*
  MatGetLocalSubMatrix_IS - Builds a MATIS "local reference" matrix that shares
  A's local matrix and remaps the given local row/column index sets, so values
  set on the submatrix land in A's local matrix. Only a restricted set of
  operations is installed on the returned matrix (see the ops patching below).
*/
static PetscErrorCode MatGetLocalSubMatrix_IS(Mat A, IS row, IS col, Mat *submat)
{
  Mat                    lA;
  Mat_IS                *matis = (Mat_IS *)(A->data);
  ISLocalToGlobalMapping rl2g, cl2g;
  IS                     is;
  const PetscInt        *rg, *rl;
  PetscInt               nrg;
  PetscInt               N, M, nrl, i, *idxs;

  PetscFunctionBegin;
  /* NOTE(review): rg is fetched and restored below but not otherwise used here */
  PetscCall(ISLocalToGlobalMappingGetIndices(A->rmap->mapping, &rg));
  PetscCall(ISGetLocalSize(row, &nrl));
  PetscCall(ISGetIndices(row, &rl));
  PetscCall(ISLocalToGlobalMappingGetSize(A->rmap->mapping, &nrg));
  if (PetscDefined(USE_DEBUG)) {
    for (i = 0; i < nrl; i++) PetscCheck(rl[i] < nrg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local row index %" PetscInt_FMT " -> %" PetscInt_FMT " greater then maximum possible %" PetscInt_FMT, i, rl[i], nrg);
  }
  /* build a row map of size nrg: the first nrl entries come from 'row',
     the remainder are padded with -1 (unused) */
  PetscCall(PetscMalloc1(nrg, &idxs));
  /* map from [0,nrl) to row */
  for (i = 0; i < nrl; i++) idxs[i] = rl[i];
  for (i = nrl; i < nrg; i++) idxs[i] = -1;
  PetscCall(ISRestoreIndices(row, &rl));
  PetscCall(ISLocalToGlobalMappingRestoreIndices(A->rmap->mapping, &rg));
  PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), nrg, idxs, PETSC_OWN_POINTER, &is));
  PetscCall(ISLocalToGlobalMappingCreateIS(is, &rl2g));
  PetscCall(ISDestroy(&is));
  /* compute new l2g map for columns; reuse the row map when rows and columns
     share the same index set and mappings */
  if (col != row || matis->rmapping != matis->cmapping || matis->A->rmap->mapping != matis->A->cmap->mapping) {
    const PetscInt *cg, *cl;
    PetscInt        ncg;
    PetscInt        ncl;

    PetscCall(ISLocalToGlobalMappingGetIndices(A->cmap->mapping, &cg));
    PetscCall(ISGetLocalSize(col, &ncl));
    PetscCall(ISGetIndices(col, &cl));
    PetscCall(ISLocalToGlobalMappingGetSize(A->cmap->mapping, &ncg));
    if (PetscDefined(USE_DEBUG)) {
      for (i = 0; i < ncl; i++) PetscCheck(cl[i] < ncg, PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Local column index %" PetscInt_FMT " -> %" PetscInt_FMT " greater then maximum possible %" PetscInt_FMT, i, cl[i], ncg);
    }
    PetscCall(PetscMalloc1(ncg, &idxs));
    /* map from [0,ncl) to col */
    for (i = 0; i < ncl; i++) idxs[i] = cl[i];
    for (i = ncl; i < ncg; i++) idxs[i] = -1;
    PetscCall(ISRestoreIndices(col, &cl));
    PetscCall(ISLocalToGlobalMappingRestoreIndices(A->cmap->mapping, &cg));
    PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)A), ncg, idxs, PETSC_OWN_POINTER, &is));
    PetscCall(ISLocalToGlobalMappingCreateIS(is, &cl2g));
    PetscCall(ISDestroy(&is));
  } else {
    PetscCall(PetscObjectReference((PetscObject)rl2g));
    cl2g = rl2g;
  }
  /* create the MATIS submatrix: same global sizes as A, sharing A's local matrix */
  PetscCall(MatGetSize(A, &M, &N));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), submat));
  PetscCall(MatSetSizes(*submat, PETSC_DECIDE, PETSC_DECIDE, M, N));
  PetscCall(MatSetType(*submat, MATIS));
  matis             = (Mat_IS *)((*submat)->data);
  matis->islocalref = PETSC_TRUE; /* marks the submatrix as a local reference (skips scatter setup) */
  PetscCall(MatSetLocalToGlobalMapping(*submat, rl2g, cl2g));
  PetscCall(MatISGetLocalMat(A, &lA));
  PetscCall(MatISSetLocalMat(*submat, lA));
  PetscCall(MatSetUp(*submat));
  PetscCall(MatAssemblyBegin(*submat, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*submat, MAT_FINAL_ASSEMBLY));
  PetscCall(ISLocalToGlobalMappingDestroy(&rl2g));
  PetscCall(ISLocalToGlobalMappingDestroy(&cl2g));

  /* remove unsupported ops */
  PetscCall(PetscMemzero((*submat)->ops, sizeof(struct _MatOps)));
  (*submat)->ops->destroy               = MatDestroy_IS;
  (*submat)->ops->setvalueslocal        = MatSetValuesLocal_SubMat_IS;
  (*submat)->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_SubMat_IS;
  (*submat)->ops->assemblybegin         = MatAssemblyBegin_IS;
  (*submat)->ops->assemblyend           = MatAssemblyEnd_IS;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3092 
/*
  MatSetFromOptions_IS - Processes the MATIS-specific command line options and
  forwards option processing to the local matrix, if it exists.
*/
static PetscErrorCode MatSetFromOptions_IS(Mat A, PetscOptionItems *PetscOptionsObject)
{
  Mat_IS   *a = (Mat_IS *)A->data;
  char      type[256]; /* buffer for the -matis_localmat_type value */
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "MATIS options");
  PetscCall(PetscOptionsBool("-matis_keepassembled", "Store an assembled version if needed", "MatISKeepAssembled", a->keepassembled, &a->keepassembled, NULL));
  PetscCall(PetscOptionsBool("-matis_fixempty", "Fix local matrices in case of empty local rows/columns", "MatISFixLocalEmpty", a->locempty, &a->locempty, NULL));
  PetscCall(PetscOptionsBool("-matis_storel2l", "Store local-to-local matrices generated from PtAP operations", "MatISStoreL2L", a->storel2l, &a->storel2l, NULL));
  PetscCall(PetscOptionsFList("-matis_localmat_type", "Matrix type", "MatISSetLocalMatType", MatList, a->lmattype, type, 256, &flg));
  if (flg) PetscCall(MatISSetLocalMatType(A, type));
  /* also let the inner (local) matrix process its own options */
  if (a->A) PetscCall(MatSetFromOptions(a->A));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
3110 
/*@
    MatCreateIS - Creates a "process" unassembled matrix, `MATIS`, assembled on each
       process but not across processes.

   Input Parameters:
+     comm    - MPI communicator that will share the matrix
.     bs      - block size of the matrix
.     m,n,M,N - local and/or global sizes of the left and right vector used in matrix vector products
.     rmap    - local to global map for rows
-     cmap    - local to global map for cols

   Output Parameter:
.    A - the resulting matrix

   Level: advanced

   Notes:
    m and n are NOT related to the size of the map; they represent the size of the local parts of the distributed vectors
    used in `MatMult()` operations. The sizes of rmap and cmap define the size of the local matrices.

    If rmap (cmap) is NULL, then the local row (column) spaces matches the global space.

.seealso: `MATIS`, `MatSetLocalToGlobalMapping()`
@*/
PetscErrorCode MatCreateIS(MPI_Comm comm, PetscInt bs, PetscInt m, PetscInt n, PetscInt M, PetscInt N, ISLocalToGlobalMapping rmap, ISLocalToGlobalMapping cmap, Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm, A));
  PetscCall(MatSetSizes(*A, m, n, M, N));
  /* a non-positive bs leaves the block size unset */
  if (bs > 0) PetscCall(MatSetBlockSize(*A, bs));
  PetscCall(MatSetType(*A, MATIS));
  PetscCall(MatSetLocalToGlobalMapping(*A, rmap, cmap));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3145 
3146 static PetscErrorCode MatHasOperation_IS(Mat A, MatOperation op, PetscBool *has)
3147 {
3148   Mat_IS      *a              = (Mat_IS *)A->data;
3149   MatOperation tobefiltered[] = {MATOP_MULT_ADD, MATOP_MULT_TRANSPOSE_ADD, MATOP_GET_DIAGONAL_BLOCK, MATOP_INCREASE_OVERLAP};
3150 
3151   PetscFunctionBegin;
3152   *has = PETSC_FALSE;
3153   if (!((void **)A->ops)[op] || !a->A) PetscFunctionReturn(PETSC_SUCCESS);
3154   *has = PETSC_TRUE;
3155   for (PetscInt i = 0; i < (PetscInt)PETSC_STATIC_ARRAY_LENGTH(tobefiltered); i++)
3156     if (op == tobefiltered[i]) PetscFunctionReturn(PETSC_SUCCESS);
3157   PetscCall(MatHasOperation(a->A, op, has));
3158   PetscFunctionReturn(PETSC_SUCCESS);
3159 }
3160 
3161 static PetscErrorCode MatSetValuesCOO_IS(Mat A, const PetscScalar v[], InsertMode imode)
3162 {
3163   Mat_IS *a = (Mat_IS *)A->data;
3164 
3165   PetscFunctionBegin;
3166   PetscCall(MatSetValuesCOO(a->A, v, imode));
3167   PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
3168   PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
3169   PetscFunctionReturn(PETSC_SUCCESS);
3170 }
3171 
3172 static PetscErrorCode MatSetPreallocationCOOLocal_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
3173 {
3174   Mat_IS *a = (Mat_IS *)A->data;
3175 
3176   PetscFunctionBegin;
3177   PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
3178   if (a->A->rmap->mapping || a->A->cmap->mapping) {
3179     PetscCall(MatSetPreallocationCOOLocal(a->A, ncoo, coo_i, coo_j));
3180   } else {
3181     PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
3182   }
3183   PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
3184   A->preallocated = PETSC_TRUE;
3185   PetscFunctionReturn(PETSC_SUCCESS);
3186 }
3187 
/*
  MatSetPreallocationCOO_IS - COO preallocation with global indices: the
  indices are converted to the local numbering and forwarded to the local
  matrix. Note that coo_i and coo_j are overwritten IN PLACE with the local
  indices (IS_GTOLM_MASK; indices not present in the map are presumably
  masked to -1 -- confirm against ISGlobalToLocalMappingApply()).
*/
static PetscErrorCode MatSetPreallocationCOO_IS(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[])
{
  Mat_IS *a = (Mat_IS *)A->data;

  PetscFunctionBegin;
  PetscCheck(a->A, PetscObjectComm((PetscObject)A), PETSC_ERR_ORDER, "Need to provide l2g map first via MatSetLocalToGlobalMapping");
  /* ncoo is passed as a PetscInt count to the mapping routines below */
  PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
  PetscCall(ISGlobalToLocalMappingApply(a->rmapping, IS_GTOLM_MASK, ncoo, coo_i, NULL, coo_i));
  PetscCall(ISGlobalToLocalMappingApply(a->cmapping, IS_GTOLM_MASK, ncoo, coo_j, NULL, coo_j));
  PetscCall(MatSetPreallocationCOO(a->A, ncoo, coo_i, coo_j));
  /* enable MatSetValuesCOO() on the MATIS itself */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetValuesCOO_C", MatSetValuesCOO_IS));
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3202 
/*
  MatISGetAssembled_Private - Returns (a reference to) a globally assembled
  AIJ/BAIJ version of the MATIS matrix, rebuilding the cached copy only when
  the matrix state or its nonzero structure has changed. Pair with
  MatISRestoreAssembled_Private().
*/
static PetscErrorCode MatISGetAssembled_Private(Mat A, Mat *tA)
{
  Mat_IS          *a = (Mat_IS *)A->data;
  PetscObjectState Astate, aAstate       = PETSC_MIN_INT; /* sentinels force a rebuild when no cached matrix exists */
  PetscObjectState Annzstate, aAnnzstate = PETSC_MIN_INT;

  PetscFunctionBegin;
  PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
  Annzstate = A->nonzerostate;
  if (a->assembledA) {
    PetscCall(PetscObjectStateGet((PetscObject)a->assembledA, &aAstate));
    aAnnzstate = a->assembledA->nonzerostate;
  }
  /* a cached matrix with a stale nonzero structure cannot be reused in place */
  if (aAnnzstate != Annzstate) PetscCall(MatDestroy(&a->assembledA));
  if (Astate != aAstate || !a->assembledA) {
    MatType     aAtype;
    PetscMPIInt size;
    PetscInt    rbs, cbs, bs;

    /* the assembled form is used as temporary storage for parallel operations
       like createsubmatrices and the like, do not waste device memory */
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(a->cmapping, &cbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(a->rmapping, &rbs));
    bs = rbs == cbs ? rbs : 1;
    /* reuse the cached matrix type if present; otherwise pick (MPI|SEQ)(B)AIJ
       from the communicator size and the common block size */
    if (a->assembledA) PetscCall(MatGetType(a->assembledA, &aAtype));
    else if (size > 1) aAtype = bs > 1 ? MATMPIBAIJ : MATMPIAIJ;
    else aAtype = bs > 1 ? MATSEQBAIJ : MATSEQAIJ;

    PetscCall(MatConvert(A, aAtype, a->assembledA ? MAT_REUSE_MATRIX : MAT_INITIAL_MATRIX, &a->assembledA));
    /* stamp the cache with A's state so an unchanged A hits the cache next time */
    PetscCall(PetscObjectStateSet((PetscObject)a->assembledA, Astate));
    a->assembledA->nonzerostate = Annzstate;
  }
  PetscCall(PetscObjectReference((PetscObject)a->assembledA));
  *tA = a->assembledA;
  /* if not asked to keep the assembled form, drop the cache so the caller's
     reference is the only one left */
  if (!a->keepassembled) PetscCall(MatDestroy(&a->assembledA));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3241 
3242 static PetscErrorCode MatISRestoreAssembled_Private(Mat A, Mat *tA)
3243 {
3244   PetscFunctionBegin;
3245   PetscCall(MatDestroy(tA));
3246   *tA = NULL;
3247   PetscFunctionReturn(PETSC_SUCCESS);
3248 }
3249 
/*
  MatGetDiagonalBlock_IS - Returns the on-process diagonal block of the MATIS
  matrix, computed from its assembled form and cached in a->dA (keyed on the
  object state of A).
*/
static PetscErrorCode MatGetDiagonalBlock_IS(Mat A, Mat *dA)
{
  Mat_IS          *a = (Mat_IS *)A->data;
  PetscObjectState Astate, dAstate = PETSC_MIN_INT; /* sentinel forces a rebuild when a->dA is NULL */

  PetscFunctionBegin;
  PetscCall(PetscObjectStateGet((PetscObject)A, &Astate));
  if (a->dA) PetscCall(PetscObjectStateGet((PetscObject)a->dA, &dAstate));
  if (Astate != dAstate) {
    Mat     tA;
    MatType ltype;

    PetscCall(MatDestroy(&a->dA));
    PetscCall(MatISGetAssembled_Private(A, &tA));
    PetscCall(MatGetDiagonalBlock(tA, &a->dA));
    PetscCall(MatPropagateSymmetryOptions(tA, a->dA));
    /* convert the block to the same type as the local matrix */
    PetscCall(MatGetType(a->A, &ltype));
    PetscCall(MatConvert(a->dA, ltype, MAT_INPLACE_MATRIX, &a->dA));
    /* the block returned by MatGetDiagonalBlock() is presumably borrowed from
       tA, so take our own reference before tA is destroyed below */
    PetscCall(PetscObjectReference((PetscObject)a->dA));
    PetscCall(MatISRestoreAssembled_Private(A, &tA));
    /* stamp the cache with A's state for the check above */
    PetscCall(PetscObjectStateSet((PetscObject)a->dA, Astate));
  }
  *dA = a->dA;
  PetscFunctionReturn(PETSC_SUCCESS);
}
3275 
/*
  MatCreateSubMatrices_IS - Extracts submatrices by operating on a temporarily
  assembled (AIJ-like) representation of the MATIS matrix.
*/
static PetscErrorCode MatCreateSubMatrices_IS(Mat A, PetscInt n, const IS irow[], const IS icol[], MatReuse reuse, Mat *submat[])
{
  Mat tA;

  PetscFunctionBegin;
  PetscCall(MatISGetAssembled_Private(A, &tA));
  PetscCall(MatCreateSubMatrices(tA, n, irow, icol, reuse, submat));
  /* MatCreateSubMatrices_MPIAIJ is a mess at the moment */
  /* disabled: would convert the extracted submatrices back to the (possibly
     device) type of the local matrix */
#if 0
  {
    Mat_IS    *a = (Mat_IS*)A->data;
    MatType   ltype;
    VecType   vtype;
    char      *flg;

    PetscCall(MatGetType(a->A,&ltype));
    PetscCall(MatGetVecType(a->A,&vtype));
    PetscCall(PetscStrstr(vtype,"cuda",&flg));
    if (!flg) PetscCall(PetscStrstr(vtype,"hip",&flg));
    if (!flg) PetscCall(PetscStrstr(vtype,"kokkos",&flg));
    if (flg) {
      for (PetscInt i = 0; i < n; i++) {
        Mat sA = (*submat)[i];

        PetscCall(MatConvert(sA,ltype,MAT_INPLACE_MATRIX,&sA));
        (*submat)[i] = sA;
      }
    }
  }
#endif
  PetscCall(MatISRestoreAssembled_Private(A, &tA));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3309 
3310 static PetscErrorCode MatIncreaseOverlap_IS(Mat A, PetscInt n, IS is[], PetscInt ov)
3311 {
3312   Mat tA;
3313 
3314   PetscFunctionBegin;
3315   PetscCall(MatISGetAssembled_Private(A, &tA));
3316   PetscCall(MatIncreaseOverlap(tA, n, is, ov));
3317   PetscCall(MatISRestoreAssembled_Private(A, &tA));
3318   PetscFunctionReturn(PETSC_SUCCESS);
3319 }
3320 
/*@
   MatISGetLocalToGlobalMapping - Gets the local-to-global numbering of the `MATIS` object

   Not Collective

   Input Parameter:
.  A - the matrix

   Output Parameters:
+  rmapping - row mapping
-  cmapping - column mapping

   Notes:
   The returned map can be different from the one used to construct the `MATIS` object, since it will not contain negative or repeated indices.

   Either output may be `NULL` if not needed.

   Level: advanced

.seealso: `MATIS`, `MatSetLocalToGlobalMapping()`
@*/
PetscErrorCode MatISGetLocalToGlobalMapping(Mat A, ISLocalToGlobalMapping *rmapping, ISLocalToGlobalMapping *cmapping)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  if (rmapping) PetscValidPointer(rmapping, 2);
  if (cmapping) PetscValidPointer(cmapping, 3);
  /* dispatch to the type-specific implementation (MatISGetLocalToGlobalMapping_IS for MATIS) */
  PetscUseMethod(A, "MatISGetLocalToGlobalMapping_C", (Mat, ISLocalToGlobalMapping *, ISLocalToGlobalMapping *), (A, rmapping, cmapping));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3350 
3351 static PetscErrorCode MatISGetLocalToGlobalMapping_IS(Mat A, ISLocalToGlobalMapping *r, ISLocalToGlobalMapping *c)
3352 {
3353   Mat_IS *a = (Mat_IS *)A->data;
3354 
3355   PetscFunctionBegin;
3356   if (r) *r = a->rmapping;
3357   if (c) *c = a->cmapping;
3358   PetscFunctionReturn(PETSC_SUCCESS);
3359 }
3360 
/*MC
   MATIS - MATIS = "is" - A matrix type to be used for using the non-overlapping domain decomposition methods (e.g. `PCBDDC` or `KSPFETIDP`).
   This stores the matrices in globally unassembled form. Each processor assembles only its local Neumann problem and the parallel matrix vector
   product is handled "implicitly".

   Options Database Keys:
+ -mat_type is - sets the matrix type to "is" during a call to MatSetFromOptions()
. -matis_fixempty - Fixes local matrices in case of empty local rows/columns.
- -matis_storel2l - stores the local-to-local operators generated by the Galerkin process of MatPtAP().

   Notes:
   Options prefix for the inner matrix are given by -is_mat_xxx

   You must call `MatSetLocalToGlobalMapping()` before using this matrix type.

   You can do matrix preallocation on the local matrix after you obtain it with
   `MatISGetLocalMat()`; otherwise, you could use `MatISSetPreallocation()`

  Level: advanced

.seealso: `MATIS`, `Mat`, `MatISGetLocalMat()`, `MatSetLocalToGlobalMapping()`, `MatISSetPreallocation()`, `MatCreateIS()`, `PCBDDC`, `KSPFETIDP`
M*/
PETSC_EXTERN PetscErrorCode MatCreate_IS(Mat A)
{
  Mat_IS *a;

  PetscFunctionBegin;
  /* allocate the MATIS private data; local matrices default to MATAIJ */
  PetscCall(PetscNew(&a));
  PetscCall(PetscStrallocpy(MATAIJ, &a->lmattype));
  A->data = (void *)a;

  /* matrix ops: clear the table, then install only the operations MATIS supports */
  PetscCall(PetscMemzero(A->ops, sizeof(struct _MatOps)));
  A->ops->mult                    = MatMult_IS;
  A->ops->multadd                 = MatMultAdd_IS;
  A->ops->multtranspose           = MatMultTranspose_IS;
  A->ops->multtransposeadd        = MatMultTransposeAdd_IS;
  A->ops->destroy                 = MatDestroy_IS;
  A->ops->setlocaltoglobalmapping = MatSetLocalToGlobalMapping_IS;
  A->ops->setvalues               = MatSetValues_IS;
  A->ops->setvaluesblocked        = MatSetValuesBlocked_IS;
  A->ops->setvalueslocal          = MatSetValuesLocal_IS;
  A->ops->setvaluesblockedlocal   = MatSetValuesBlockedLocal_IS;
  A->ops->zerorows                = MatZeroRows_IS;
  A->ops->zerorowscolumns         = MatZeroRowsColumns_IS;
  A->ops->assemblybegin           = MatAssemblyBegin_IS;
  A->ops->assemblyend             = MatAssemblyEnd_IS;
  A->ops->view                    = MatView_IS;
  A->ops->zeroentries             = MatZeroEntries_IS;
  A->ops->scale                   = MatScale_IS;
  A->ops->getdiagonal             = MatGetDiagonal_IS;
  A->ops->setoption               = MatSetOption_IS;
  A->ops->ishermitian             = MatIsHermitian_IS;
  A->ops->issymmetric             = MatIsSymmetric_IS;
  A->ops->isstructurallysymmetric = MatIsStructurallySymmetric_IS;
  A->ops->duplicate               = MatDuplicate_IS;
  A->ops->missingdiagonal         = MatMissingDiagonal_IS;
  A->ops->copy                    = MatCopy_IS;
  A->ops->getlocalsubmatrix       = MatGetLocalSubMatrix_IS;
  A->ops->createsubmatrix         = MatCreateSubMatrix_IS;
  A->ops->axpy                    = MatAXPY_IS;
  A->ops->diagonalset             = MatDiagonalSet_IS;
  A->ops->shift                   = MatShift_IS;
  A->ops->transpose               = MatTranspose_IS;
  A->ops->getinfo                 = MatGetInfo_IS;
  A->ops->diagonalscale           = MatDiagonalScale_IS;
  A->ops->setfromoptions          = MatSetFromOptions_IS;
  A->ops->setup                   = MatSetUp_IS;
  A->ops->hasoperation            = MatHasOperation_IS;
  A->ops->getdiagonalblock        = MatGetDiagonalBlock_IS;
  A->ops->createsubmatrices       = MatCreateSubMatrices_IS;
  A->ops->increaseoverlap         = MatIncreaseOverlap_IS;

  /* special MATIS functions: composed so generic code can query them by name,
     and so MatConvert() finds the MATIS -> (S)(B)AIJ conversions */
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMatType_C", MatISSetLocalMatType_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalMat_C", MatISGetLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISRestoreLocalMat_C", MatISRestoreLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetLocalMat_C", MatISSetLocalMat_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetMPIXAIJ_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISSetPreallocation_C", MatISSetPreallocation_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISStoreL2L_C", MatISStoreL2L_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISFixLocalEmpty_C", MatISFixLocalEmpty_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatISGetLocalToGlobalMapping_C", MatISGetLocalToGlobalMapping_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpiaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpibaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_mpisbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_seqsbaij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatConvert_is_aij_C", MatConvert_IS_XAIJ));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", MatSetPreallocationCOOLocal_IS));
  PetscCall(PetscObjectComposeFunction((PetscObject)A, "MatSetPreallocationCOO_C", MatSetPreallocationCOO_IS));
  PetscCall(PetscObjectChangeTypeName((PetscObject)A, MATIS));
  PetscFunctionReturn(PETSC_SUCCESS);
}
3456