xref: /petsc/src/mat/interface/matrix.c (revision 40cbb1a031ea8f2be4fe2b92dc842b003ad37be3)
1 /*
2    This is where the abstract matrix operations are defined
3 */
4 
5 #include <petsc/private/matimpl.h>        /*I "petscmat.h" I*/
6 #include <petsc/private/isimpl.h>
7 #include <petsc/private/vecimpl.h>
8 
/* Logging support: class ids and events registered at MatInitializePackage() time */
PetscClassId MAT_CLASSID;
PetscClassId MAT_COLORING_CLASSID;
PetscClassId MAT_FDCOLORING_CLASSID;
PetscClassId MAT_TRANSPOSECOLORING_CLASSID;

/* Events for the core matrix operations (mult, solve, factor, assembly, ...) */
PetscLogEvent MAT_Mult, MAT_Mults, MAT_MultAdd, MAT_MultTranspose;
PetscLogEvent MAT_MultTransposeAdd, MAT_Solve, MAT_Solves, MAT_SolveAdd, MAT_SolveTranspose, MAT_MatSolve,MAT_MatTrSolve;
PetscLogEvent MAT_SolveTransposeAdd, MAT_SOR, MAT_ForwardSolve, MAT_BackwardSolve, MAT_LUFactor, MAT_LUFactorSymbolic;
PetscLogEvent MAT_LUFactorNumeric, MAT_CholeskyFactor, MAT_CholeskyFactorSymbolic, MAT_CholeskyFactorNumeric, MAT_ILUFactor;
PetscLogEvent MAT_ILUFactorSymbolic, MAT_ICCFactorSymbolic, MAT_Copy, MAT_Convert, MAT_Scale, MAT_AssemblyBegin;
PetscLogEvent MAT_QRFactorNumeric, MAT_QRFactorSymbolic, MAT_QRFactor;
PetscLogEvent MAT_AssemblyEnd, MAT_SetValues, MAT_GetValues, MAT_GetRow, MAT_GetRowIJ, MAT_CreateSubMats, MAT_GetOrdering, MAT_RedundantMat, MAT_GetSeqNonzeroStructure;
PetscLogEvent MAT_IncreaseOverlap, MAT_Partitioning, MAT_PartitioningND, MAT_Coarsen, MAT_ZeroEntries, MAT_Load, MAT_View, MAT_AXPY, MAT_FDColoringCreate;
PetscLogEvent MAT_FDColoringSetUp, MAT_FDColoringApply,MAT_Transpose,MAT_FDColoringFunction, MAT_CreateSubMat;
PetscLogEvent MAT_TransposeColoringCreate;
/* Events for matrix-matrix products and their symbolic/numeric phases */
PetscLogEvent MAT_MatMult, MAT_MatMultSymbolic, MAT_MatMultNumeric;
PetscLogEvent MAT_PtAP, MAT_PtAPSymbolic, MAT_PtAPNumeric,MAT_RARt, MAT_RARtSymbolic, MAT_RARtNumeric;
PetscLogEvent MAT_MatTransposeMult, MAT_MatTransposeMultSymbolic, MAT_MatTransposeMultNumeric;
PetscLogEvent MAT_TransposeMatMult, MAT_TransposeMatMultSymbolic, MAT_TransposeMatMultNumeric;
PetscLogEvent MAT_MatMatMult, MAT_MatMatMultSymbolic, MAT_MatMatMultNumeric;
PetscLogEvent MAT_MultHermitianTranspose,MAT_MultHermitianTransposeAdd;
PetscLogEvent MAT_Getsymtranspose, MAT_Getsymtransreduced, MAT_GetBrowsOfAcols;
PetscLogEvent MAT_GetBrowsOfAocols, MAT_Getlocalmat, MAT_Getlocalmatcondensed, MAT_Seqstompi, MAT_Seqstompinum, MAT_Seqstompisym;
PetscLogEvent MAT_Applypapt, MAT_Applypapt_numeric, MAT_Applypapt_symbolic, MAT_GetSequentialNonzeroStructure;
PetscLogEvent MAT_GetMultiProcBlock;
/* Events for GPU back ends (CUSPARSE, ViennaCL, dense-GPU) */
PetscLogEvent MAT_CUSPARSECopyToGPU, MAT_CUSPARSECopyFromGPU, MAT_CUSPARSEGenerateTranspose, MAT_CUSPARSESolveAnalysis;
PetscLogEvent MAT_PreallCOO, MAT_SetVCOO;
PetscLogEvent MAT_SetValuesBatch;
PetscLogEvent MAT_ViennaCLCopyToGPU;
PetscLogEvent MAT_DenseCopyToGPU, MAT_DenseCopyFromGPU;
PetscLogEvent MAT_Merge,MAT_Residual,MAT_SetRandom;
PetscLogEvent MAT_FactorFactS,MAT_FactorInvS;
PetscLogEvent MATCOLORING_Apply,MATCOLORING_Comm,MATCOLORING_Local,MATCOLORING_ISCreate,MATCOLORING_SetUp,MATCOLORING_Weights;
PetscLogEvent MAT_H2Opus_Build,MAT_H2Opus_Compress,MAT_H2Opus_Orthog,MAT_H2Opus_LR;

/* String names for MatFactorType; order must match the MatFactorType enum declaration */
const char *const MatFactorTypes[] = {"NONE","LU","CHOLESKY","ILU","ICC","ILUDT","QR","MatFactorType","MAT_FACTOR_",NULL};
46 
47 /*@
48    MatSetRandom - Sets all components of a matrix to random numbers. For sparse matrices that have been preallocated but not been assembled it randomly selects appropriate locations,
49                   for sparse matrices that already have locations it fills the locations with random numbers
50 
51    Logically Collective on Mat
52 
53    Input Parameters:
54 +  x  - the matrix
55 -  rctx - the random number context, formed by PetscRandomCreate(), or NULL and
56           it will create one internally.
57 
58    Output Parameter:
59 .  x  - the matrix
60 
61    Example of Usage:
62 .vb
63      PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
64      MatSetRandom(x,rctx);
65      PetscRandomDestroy(rctx);
66 .ve
67 
68    Level: intermediate
69 
70 .seealso: `MatZeroEntries()`, `MatSetValues()`, `PetscRandomCreate()`, `PetscRandomDestroy()`
71 @*/
72 PetscErrorCode MatSetRandom(Mat x,PetscRandom rctx)
73 {
74   PetscRandom    randObj = NULL;
75 
76   PetscFunctionBegin;
77   PetscValidHeaderSpecific(x,MAT_CLASSID,1);
78   if (rctx) PetscValidHeaderSpecific(rctx,PETSC_RANDOM_CLASSID,2);
79   PetscValidType(x,1);
80   MatCheckPreallocated(x,1);
81 
82   PetscCheck(x->ops->setrandom,PetscObjectComm((PetscObject)x),PETSC_ERR_SUP,"Mat type %s",((PetscObject)x)->type_name);
83 
84   if (!rctx) {
85     MPI_Comm comm;
86     PetscCall(PetscObjectGetComm((PetscObject)x,&comm));
87     PetscCall(PetscRandomCreate(comm,&randObj));
88     PetscCall(PetscRandomSetFromOptions(randObj));
89     rctx = randObj;
90   }
91   PetscCall(PetscLogEventBegin(MAT_SetRandom,x,rctx,0,0));
92   PetscCall((*x->ops->setrandom)(x,rctx));
93   PetscCall(PetscLogEventEnd(MAT_SetRandom,x,rctx,0,0));
94 
95   PetscCall(MatAssemblyBegin(x,MAT_FINAL_ASSEMBLY));
96   PetscCall(MatAssemblyEnd(x,MAT_FINAL_ASSEMBLY));
97   PetscCall(PetscRandomDestroy(&randObj));
98   PetscFunctionReturn(0);
99 }
100 
/*@
   MatFactorGetErrorZeroPivot - returns the pivot value that was determined to be zero and the row it occurred in

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Output Parameters:
+  pivot - the pivot value computed
-  row - the row that the zero pivot occurred in. Note that this row must be interpreted carefully due to row reorderings and which processes
         share the matrix

   Level: advanced

   Notes:
    This routine does not work for factorizations done with external packages.

    This routine should only be called if MatGetFactorError() returns a value of MAT_FACTOR_NUMERIC_ZEROPIVOT

    This can be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`, `MatFactorGetError()`
@*/
PetscErrorCode MatFactorGetErrorZeroPivot(Mat mat,PetscReal *pivot,PetscInt *row)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidRealPointer(pivot,2);
  PetscValidIntPointer(row,3);
  /* values recorded on mat by the numeric factorization that detected the zero pivot */
  *pivot = mat->factorerror_zeropivot_value;
  *row   = mat->factorerror_zeropivot_row;
  PetscFunctionReturn(0);
}
135 
/*@
   MatFactorGetError - gets the error code from a factorization

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Output Parameter:
.  err  - the error code

   Level: advanced

   Notes:
    This can be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorClearError()`, `MatFactorGetErrorZeroPivot()`
@*/
PetscErrorCode MatFactorGetError(Mat mat,MatFactorError *err)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(err,2);
  *err = mat->factorerrortype; /* recorded by the most recent factorization attempt */
  PetscFunctionReturn(0);
}
162 
/*@
   MatFactorClearError - clears the error code in a factorization

   Logically Collective on Mat

   Input Parameter:
.  mat - the factored matrix

   Level: developer

   Notes:
    This can be called on non-factored matrices that come from, for example, matrices used in SOR.

.seealso: `MatZeroEntries()`, `MatFactor()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`, `MatFactorGetError()`, `MatFactorGetErrorZeroPivot()`
@*/
PetscErrorCode MatFactorClearError(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  /* reset all factorization-error state recorded on the matrix */
  mat->factorerrortype             = MAT_FACTOR_NOERROR;
  mat->factorerror_zeropivot_value = 0.0;
  mat->factorerror_zeropivot_row   = 0;
  PetscFunctionReturn(0);
}
187 
188 PETSC_INTERN PetscErrorCode MatFindNonzeroRowsOrCols_Basic(Mat mat,PetscBool cols,PetscReal tol,IS *nonzero)
189 {
190   Vec               r,l;
191   const PetscScalar *al;
192   PetscInt          i,nz,gnz,N,n;
193 
194   PetscFunctionBegin;
195   PetscCall(MatCreateVecs(mat,&r,&l));
196   if (!cols) { /* nonzero rows */
197     PetscCall(MatGetSize(mat,&N,NULL));
198     PetscCall(MatGetLocalSize(mat,&n,NULL));
199     PetscCall(VecSet(l,0.0));
200     PetscCall(VecSetRandom(r,NULL));
201     PetscCall(MatMult(mat,r,l));
202     PetscCall(VecGetArrayRead(l,&al));
203   } else { /* nonzero columns */
204     PetscCall(MatGetSize(mat,NULL,&N));
205     PetscCall(MatGetLocalSize(mat,NULL,&n));
206     PetscCall(VecSet(r,0.0));
207     PetscCall(VecSetRandom(l,NULL));
208     PetscCall(MatMultTranspose(mat,l,r));
209     PetscCall(VecGetArrayRead(r,&al));
210   }
211   if (tol <= 0.0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nz++; }
212   else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nz++; }
213   PetscCall(MPIU_Allreduce(&nz,&gnz,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)mat)));
214   if (gnz != N) {
215     PetscInt *nzr;
216     PetscCall(PetscMalloc1(nz,&nzr));
217     if (nz) {
218       if (tol < 0) { for (i=0,nz=0;i<n;i++) if (al[i] != 0.0) nzr[nz++] = i; }
219       else { for (i=0,nz=0;i<n;i++) if (PetscAbsScalar(al[i]) > tol) nzr[nz++] = i; }
220     }
221     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject)mat),nz,nzr,PETSC_OWN_POINTER,nonzero));
222   } else *nonzero = NULL;
223   if (!cols) { /* nonzero rows */
224     PetscCall(VecRestoreArrayRead(l,&al));
225   } else {
226     PetscCall(VecRestoreArrayRead(r,&al));
227   }
228   PetscCall(VecDestroy(&l));
229   PetscCall(VecDestroy(&r));
230   PetscFunctionReturn(0);
231 }
232 
/*@
      MatFindNonzeroRows - Locate all rows that are not completely zero in the matrix

  Input Parameter:
.    mat  - the matrix

  Output Parameter:
.    keptrows - the rows that are not completely zero

  Notes:
    keptrows is set to NULL if all rows are nonzero.

  Level: intermediate

.seealso: `MatFindZeroRows()`
 @*/
PetscErrorCode MatFindNonzeroRows(Mat mat,IS *keptrows)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(keptrows,2);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (mat->ops->findnonzerorows) {
    /* type-specific implementation */
    PetscCall((*mat->ops->findnonzerorows)(mat,keptrows));
  } else {
    /* fall back to the generic (matrix-vector-product based) implementation */
    PetscCall(MatFindNonzeroRowsOrCols_Basic(mat,PETSC_FALSE,0.0,keptrows));
  }
  PetscFunctionReturn(0);
}
263 
264 /*@
265       MatFindZeroRows - Locate all rows that are completely zero in the matrix
266 
267   Input Parameter:
268 .    A  - the matrix
269 
270   Output Parameter:
271 .    zerorows - the rows that are completely zero
272 
273   Notes:
274     zerorows is set to NULL if no rows are zero.
275 
276   Level: intermediate
277 
278  @*/
279 PetscErrorCode MatFindZeroRows(Mat mat,IS *zerorows)
280 {
281   IS       keptrows;
282   PetscInt m, n;
283 
284   PetscFunctionBegin;
285   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
286   PetscValidType(mat,1);
287   PetscValidPointer(zerorows,2);
288   PetscCall(MatFindNonzeroRows(mat, &keptrows));
289   /* MatFindNonzeroRows sets keptrows to NULL if there are no zero rows.
290      In keeping with this convention, we set zerorows to NULL if there are no zero
291      rows. */
292   if (keptrows == NULL) {
293     *zerorows = NULL;
294   } else {
295     PetscCall(MatGetOwnershipRange(mat,&m,&n));
296     PetscCall(ISComplement(keptrows,m,n,zerorows));
297     PetscCall(ISDestroy(&keptrows));
298   }
299   PetscFunctionReturn(0);
300 }
301 
302 /*@
303    MatGetDiagonalBlock - Returns the part of the matrix associated with the on-process coupling
304 
305    Not Collective
306 
307    Input Parameters:
308 .   A - the matrix
309 
310    Output Parameters:
311 .   a - the diagonal part (which is a SEQUENTIAL matrix)
312 
313    Notes:
314     see the manual page for MatCreateAIJ() for more information on the "diagonal part" of the matrix.
315           Use caution, as the reference count on the returned matrix is not incremented and it is used as
316           part of the containing MPI Mat's normal operation.
317 
318    Level: advanced
319 
320 @*/
321 PetscErrorCode MatGetDiagonalBlock(Mat A,Mat *a)
322 {
323   PetscFunctionBegin;
324   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
325   PetscValidType(A,1);
326   PetscValidPointer(a,2);
327   PetscCheck(!A->factortype,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
328   if (A->ops->getdiagonalblock) {
329     PetscCall((*A->ops->getdiagonalblock)(A,a));
330   } else {
331     PetscMPIInt size;
332 
333     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
334     PetscCheck(size == 1,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Not for parallel matrix type %s",((PetscObject)A)->type_name);
335     *a = A;
336   }
337   PetscFunctionReturn(0);
338 }
339 
340 /*@
341    MatGetTrace - Gets the trace of a matrix. The sum of the diagonal entries.
342 
343    Collective on Mat
344 
345    Input Parameters:
346 .  mat - the matrix
347 
348    Output Parameter:
349 .   trace - the sum of the diagonal entries
350 
351    Level: advanced
352 
353 @*/
354 PetscErrorCode MatGetTrace(Mat mat,PetscScalar *trace)
355 {
356   Vec diag;
357 
358   PetscFunctionBegin;
359   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
360   PetscValidScalarPointer(trace,2);
361   PetscCall(MatCreateVecs(mat,&diag,NULL));
362   PetscCall(MatGetDiagonal(mat,diag));
363   PetscCall(VecSum(diag,trace));
364   PetscCall(VecDestroy(&diag));
365   PetscFunctionReturn(0);
366 }
367 
/*@
   MatRealPart - Zeros out the imaginary part of the matrix

   Logically Collective on Mat

   Input Parameter:
.  mat - the matrix

   Level: advanced

.seealso: `MatImaginaryPart()`
@*/
PetscErrorCode MatRealPart(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->realpart,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  /* dispatch to the type-specific implementation */
  PetscCall((*mat->ops->realpart)(mat));
  PetscFunctionReturn(0);
}
392 
393 /*@C
394    MatGetGhosts - Get the global index of all ghost nodes defined by the sparse matrix
395 
396    Collective on Mat
397 
398    Input Parameter:
399 .  mat - the matrix
400 
401    Output Parameters:
402 +   nghosts - number of ghosts (note for BAIJ matrices there is one ghost for each block)
403 -   ghosts - the global indices of the ghost points
404 
405    Notes:
406     the nghosts and ghosts are suitable to pass into VecCreateGhost()
407 
408    Level: advanced
409 
410 @*/
411 PetscErrorCode MatGetGhosts(Mat mat,PetscInt *nghosts,const PetscInt *ghosts[])
412 {
413   PetscFunctionBegin;
414   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
415   PetscValidType(mat,1);
416   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
417   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
418   if (mat->ops->getghosts) {
419     PetscCall((*mat->ops->getghosts)(mat,nghosts,ghosts));
420   } else {
421     if (nghosts) *nghosts = 0;
422     if (ghosts)  *ghosts  = NULL;
423   }
424   PetscFunctionReturn(0);
425 }
426 
/*@
   MatImaginaryPart - Moves the imaginary part of the matrix to the real part and zeros the imaginary part

   Logically Collective on Mat

   Input Parameter:
.  mat - the matrix

   Level: advanced

.seealso: `MatRealPart()`
@*/
PetscErrorCode MatImaginaryPart(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->imaginarypart,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  /* dispatch to the type-specific implementation */
  PetscCall((*mat->ops->imaginarypart)(mat));
  PetscFunctionReturn(0);
}
451 
/*@
   MatMissingDiagonal - Determine if sparse matrix is missing a diagonal entry (or block entry for BAIJ matrices)

   Not Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  missing - is any diagonal missing
-  dd - first diagonal entry that is missing (optional) on this process; pass NULL if not needed

   Level: advanced

.seealso: `MatRealPart()`
@*/
PetscErrorCode MatMissingDiagonal(Mat mat,PetscBool *missing,PetscInt *dd)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidBoolPointer(missing,2);
  /* dd is optional and not validated here; presumably the implementations accept NULL - verify per type */
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix %s",((PetscObject)mat)->type_name);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->missingdiagonal,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall((*mat->ops->missingdiagonal)(mat,missing,dd));
  PetscFunctionReturn(0);
}
480 
/*@C
   MatGetRow - Gets a row of a matrix.  You MUST call MatRestoreRow()
   for each row that you get to ensure that your application does
   not bleed memory.

   Not Collective

   Input Parameters:
+  mat - the matrix
-  row - the row to get

   Output Parameters:
+  ncols -  if not NULL, the number of nonzeros in the row
.  cols - if not NULL, the column numbers
-  vals - if not NULL, the values

   Notes:
   This routine is provided for people who need to have direct access
   to the structure of a matrix.  We hope that we provide enough
   high-level matrix routines that few users will need it.

   MatGetRow() always returns 0-based column indices, regardless of
   whether the internal representation is 0-based (default) or 1-based.

   For better efficiency, set cols and/or vals to NULL if you do
   not wish to extract these quantities.

   The user can only examine the values extracted with MatGetRow();
   the values cannot be altered.  To change the matrix entries, one
   must use MatSetValues().

   You can only have one call to MatGetRow() outstanding for a particular
   matrix at a time, per processor. MatGetRow() can only obtain rows
   associated with the given processor, it cannot get rows from the
   other processors; for that we suggest using MatCreateSubMatrices(), then
   MatGetRow() on the submatrix. The row index passed to MatGetRow()
   is in the global number of rows.

   Fortran Notes:
   The calling sequence from Fortran is
.vb
   MatGetRow(matrix,row,ncols,cols,values,ierr)
         Mat     matrix (input)
         integer row    (input)
         integer ncols  (output)
         integer cols(maxcols) (output)
         double precision (or double complex) values(maxcols) output
.ve
   where maxcols >= maximum nonzeros in any row of the matrix.

   Caution:
   Do not try to change the contents of the output arrays (cols and vals).
   In some cases, this may corrupt the matrix.

   Level: advanced

.seealso: `MatRestoreRow()`, `MatSetValues()`, `MatGetValues()`, `MatCreateSubMatrices()`, `MatGetDiagonal()`
@*/
PetscErrorCode MatGetRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
{
  PetscInt incols;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->getrow,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  PetscCheck(row >= mat->rmap->rstart && row < mat->rmap->rend,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Only for local rows, %" PetscInt_FMT " not in [%" PetscInt_FMT ",%" PetscInt_FMT ")",row,mat->rmap->rstart,mat->rmap->rend);
  PetscCall(PetscLogEventBegin(MAT_GetRow,mat,0,0,0));
  /* the casts drop const for the internal interface; callers must still treat cols/vals as read-only */
  PetscCall((*mat->ops->getrow)(mat,row,&incols,(PetscInt**)cols,(PetscScalar**)vals));
  if (ncols) *ncols = incols;
  PetscCall(PetscLogEventEnd(MAT_GetRow,mat,0,0,0));
  PetscFunctionReturn(0);
}
557 
558 /*@
559    MatConjugate - replaces the matrix values with their complex conjugates
560 
561    Logically Collective on Mat
562 
563    Input Parameters:
564 .  mat - the matrix
565 
566    Level: advanced
567 
568 .seealso: `VecConjugate()`
569 @*/
570 PetscErrorCode MatConjugate(Mat mat)
571 {
572   PetscFunctionBegin;
573   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
574   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
575   if (PetscDefined(USE_COMPLEX)) {
576     PetscCheck(mat->ops->conjugate,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not provided for matrix type %s, send email to petsc-maint@mcs.anl.gov",((PetscObject)mat)->type_name);
577     PetscCall((*mat->ops->conjugate)(mat));
578   }
579   PetscFunctionReturn(0);
580 }
581 
/*@C
   MatRestoreRow - Frees any temporary space allocated by MatGetRow().

   Not Collective

   Input Parameters:
+  mat - the matrix
.  row - the row to get
.  ncols, cols - the number of nonzeros and their columns
-  vals - if nonzero the column values

   Notes:
   This routine should be called after you have finished examining the entries.

   This routine zeros out ncols, cols, and vals. This is to prevent accidental
   use of the array after it has been restored. If you pass NULL, it will
   not zero the pointers.  Use of cols or vals after MatRestoreRow is invalid.

   Fortran Notes:
   The calling sequence from Fortran is
.vb
   MatRestoreRow(matrix,row,ncols,cols,values,ierr)
      Mat     matrix (input)
      integer row    (input)
      integer ncols  (output)
      integer cols(maxcols) (output)
      double precision (or double complex) values(maxcols) output
.ve
   Where maxcols >= maximum nonzeros in any row of the matrix.

   In Fortran MatRestoreRow() MUST be called after MatGetRow()
   before another call to MatGetRow() can be made.

   Level: advanced

.seealso: `MatGetRow()`
@*/
PetscErrorCode MatRestoreRow(Mat mat,PetscInt row,PetscInt *ncols,const PetscInt *cols[],const PetscScalar *vals[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  if (ncols) PetscValidIntPointer(ncols,3);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  if (!mat->ops->restorerow) PetscFunctionReturn(0);
  PetscCall((*mat->ops->restorerow)(mat,row,ncols,(PetscInt **)cols,(PetscScalar **)vals));
  /* invalidate the caller's views so stale pointers cannot be dereferenced by accident */
  if (ncols) *ncols = 0;
  if (cols)  *cols = NULL;
  if (vals)  *vals = NULL;
  PetscFunctionReturn(0);
}
632 
/*@
   MatGetRowUpperTriangular - Sets a flag to enable calls to MatGetRow() for matrix in MATSBAIJ format.
   You should call MatRestoreRowUpperTriangular() after calling MatGetRow/MatRestoreRow() to disable the flag.

   Not Collective

   Input Parameter:
.  mat - the matrix

   Notes:
   The flag is to ensure that users are aware of MatGetRow() only provides the upper triangular part of the row for the matrices in MATSBAIJ format.

   Level: advanced

.seealso: `MatRestoreRowUpperTriangular()`
@*/
PetscErrorCode MatGetRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);
  /* silently succeed for matrix types that do not need the flag (only SBAIJ-like types implement it) */
  if (!mat->ops->getrowuppertriangular) PetscFunctionReturn(0);
  PetscCall((*mat->ops->getrowuppertriangular)(mat));
  PetscFunctionReturn(0);
}
661 
/*@
   MatRestoreRowUpperTriangular - Disable calls to MatGetRow() for matrix in MATSBAIJ format.

   Not Collective

   Input Parameter:
.  mat - the matrix

   Notes:
   This routine should be called after you have finished MatGetRow/MatRestoreRow().

   Level: advanced

.seealso: `MatGetRowUpperTriangular()`
@*/
PetscErrorCode MatRestoreRowUpperTriangular(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);
  /* silently succeed for matrix types that never set the flag */
  if (!mat->ops->restorerowuppertriangular) PetscFunctionReturn(0);
  PetscCall((*mat->ops->restorerowuppertriangular)(mat));
  PetscFunctionReturn(0);
}
689 
/*@C
   MatSetOptionsPrefix - Sets the prefix used for searching for all
   Mat options in the database.

   Logically Collective on Mat

   Input Parameters:
+  A - the Mat context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.seealso: `MatSetFromOptions()`
@*/
PetscErrorCode MatSetOptionsPrefix(Mat A,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* thin wrapper over the generic PetscObject prefix mechanism */
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)A,prefix));
  PetscFunctionReturn(0);
}
715 
/*@C
   MatAppendOptionsPrefix - Appends to the prefix used for searching for all
   Mat options in the database.

   Logically Collective on Mat

   Input Parameters:
+  A - the Mat context
-  prefix - the prefix to append to the current prefix

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.seealso: `MatGetOptionsPrefix()`
@*/
PetscErrorCode MatAppendOptionsPrefix(Mat A,const char prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* thin wrapper over the generic PetscObject prefix mechanism */
  PetscCall(PetscObjectAppendOptionsPrefix((PetscObject)A,prefix));
  PetscFunctionReturn(0);
}
741 
/*@C
   MatGetOptionsPrefix - Gets the prefix used for searching for all
   Mat options in the database.

   Not Collective

   Input Parameter:
.  A - the Mat context

   Output Parameter:
.  prefix - pointer to the prefix string used; the caller does not own this string and must not free it

   Notes:
    On the fortran side, the user should pass in a string 'prefix' of
   sufficient length to hold the prefix.

   Level: advanced

.seealso: `MatAppendOptionsPrefix()`
@*/
PetscErrorCode MatGetOptionsPrefix(Mat A,const char *prefix[])
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(prefix,2);
  /* thin wrapper over the generic PetscObject prefix mechanism */
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)A,prefix));
  PetscFunctionReturn(0);
}
770 
/*@
   MatResetPreallocation - Reset mat to use the original nonzero pattern provided by users.

   Collective on Mat

   Input Parameter:
.  A - the Mat context

   Notes:
   The allocated memory will be shrunk after calling MatAssembly with MAT_FINAL_ASSEMBLY. Users can reset the preallocation to access the original memory.
   Currently support MPIAIJ and SEQAIJ.

   Level: beginner

.seealso: `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatXAIJSetPreallocation()`
@*/
PetscErrorCode MatResetPreallocation(Mat A)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  /* dispatch to the type's registered MatResetPreallocation_C method;
     NOTE(review): PetscUseMethod presumably errors when the type provides none - confirm */
  PetscUseMethod(A,"MatResetPreallocation_C",(Mat),(A));
  PetscFunctionReturn(0);
}
795 
796 /*@
797    MatSetUp - Sets up the internal matrix data structures for later use.
798 
799    Collective on Mat
800 
801    Input Parameters:
802 .  A - the Mat context
803 
804    Notes:
805    If the user has not set preallocation for this matrix then a default preallocation that is likely to be inefficient is used.
806 
807    If a suitable preallocation routine is used, this function does not need to be called.
808 
809    See the Performance chapter of the PETSc users manual for how to preallocate matrices
810 
811    Level: beginner
812 
813 .seealso: `MatCreate()`, `MatDestroy()`
814 @*/
815 PetscErrorCode MatSetUp(Mat A)
816 {
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
819   if (!((PetscObject)A)->type_name) {
820     PetscMPIInt size;
821 
822     PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A), &size));
823     PetscCall(MatSetType(A, size == 1 ? MATSEQAIJ : MATMPIAIJ));
824   }
825   if (!A->preallocated && A->ops->setup) {
826     PetscCall(PetscInfo(A,"Warning not preallocating matrix storage\n"));
827     PetscCall((*A->ops->setup)(A));
828   }
829   PetscCall(PetscLayoutSetUp(A->rmap));
830   PetscCall(PetscLayoutSetUp(A->cmap));
831   A->preallocated = PETSC_TRUE;
832   PetscFunctionReturn(0);
833 }
834 
835 #if defined(PETSC_HAVE_SAWS)
836 #include <petscviewersaws.h>
837 #endif
838 
839 /*@C
840    MatViewFromOptions - View from Options
841 
842    Collective on Mat
843 
844    Input Parameters:
845 +  A - the Mat context
846 .  obj - Optional object
847 -  name - command line option
848 
849    Level: intermediate
850 .seealso: `Mat`, `MatView`, `PetscObjectViewFromOptions()`, `MatCreate()`
851 @*/
852 PetscErrorCode  MatViewFromOptions(Mat A,PetscObject obj,const char name[])
853 {
854   PetscFunctionBegin;
855   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
856   PetscCall(PetscObjectViewFromOptions((PetscObject)A,obj,name));
857   PetscFunctionReturn(0);
858 }
859 
860 /*@C
861    MatView - Visualizes a matrix object.
862 
863    Collective on Mat
864 
865    Input Parameters:
866 +  mat - the matrix
867 -  viewer - visualization context
868 
869   Notes:
870   The available visualization contexts include
871 +    PETSC_VIEWER_STDOUT_SELF - for sequential matrices
872 .    PETSC_VIEWER_STDOUT_WORLD - for parallel matrices created on PETSC_COMM_WORLD
873 .    PETSC_VIEWER_STDOUT_(comm) - for matrices created on MPI communicator comm
874 -     PETSC_VIEWER_DRAW_WORLD - graphical display of nonzero structure
875 
876    The user can open alternative visualization contexts with
877 +    PetscViewerASCIIOpen() - Outputs matrix to a specified file
878 .    PetscViewerBinaryOpen() - Outputs matrix in binary to a
879          specified file; corresponding input uses MatLoad()
880 .    PetscViewerDrawOpen() - Outputs nonzero matrix structure to
881          an X window display
882 -    PetscViewerSocketOpen() - Outputs matrix to Socket viewer.
883          Currently only the sequential dense and AIJ
884          matrix types support the Socket viewer.
885 
886    The user can call PetscViewerPushFormat() to specify the output
887    format of ASCII printed objects (when using PETSC_VIEWER_STDOUT_SELF,
888    PETSC_VIEWER_STDOUT_WORLD and PetscViewerASCIIOpen).  Available formats include
889 +    PETSC_VIEWER_DEFAULT - default, prints matrix contents
890 .    PETSC_VIEWER_ASCII_MATLAB - prints matrix contents in Matlab format
891 .    PETSC_VIEWER_ASCII_DENSE - prints entire matrix including zeros
892 .    PETSC_VIEWER_ASCII_COMMON - prints matrix contents, using a sparse
893          format common among all matrix types
894 .    PETSC_VIEWER_ASCII_IMPL - prints matrix contents, using an implementation-specific
895          format (which is in many cases the same as the default)
896 .    PETSC_VIEWER_ASCII_INFO - prints basic information about the matrix
897          size and structure (not the matrix entries)
898 -    PETSC_VIEWER_ASCII_INFO_DETAIL - prints more detailed information about
899          the matrix structure
900 
901    Options Database Keys:
902 +  -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
903 .  -mat_view ::ascii_info_detail - Prints more detailed info
904 .  -mat_view - Prints matrix in ASCII format
905 .  -mat_view ::ascii_matlab - Prints matrix in Matlab format
906 .  -mat_view draw - PetscDraws nonzero structure of matrix, using MatView() and PetscDrawOpenX().
907 .  -display <name> - Sets display name (default is host)
908 .  -draw_pause <sec> - Sets number of seconds to pause after display
909 .  -mat_view socket - Sends matrix to socket, can be accessed from Matlab (see Users-Manual: ch_matlab for details)
910 .  -viewer_socket_machine <machine> -
911 .  -viewer_socket_port <port> -
912 .  -mat_view binary - save matrix to file in binary format
913 -  -viewer_binary_filename <name> -
914 
915    Level: beginner
916 
917    Notes:
918     The ASCII viewers are only recommended for small matrices on at most a moderate number of processes,
919     the program will seemingly hang and take hours for larger matrices, for larger matrices one should use the binary format.
920 
921     In the debugger you can do "call MatView(mat,0)" to display the matrix. (The same holds for any PETSc object viewer).
922 
923     See the manual page for MatLoad() for the exact format of the binary file when the binary
924       viewer is used.
925 
926       See share/petsc/matlab/PetscBinaryRead.m for a Matlab code that can read in the binary file when the binary
927       viewer is used and lib/petsc/bin/PetscBinaryIO.py for loading them into Python.
928 
929       One can use '-mat_view draw -draw_pause -1' to pause the graphical display of matrix nonzero structure,
930       and then use the following mouse functions.
931 .vb
932   left mouse: zoom in
933   middle mouse: zoom out
934   right mouse: continue with the simulation
935 .ve
936 
937 .seealso: `PetscViewerPushFormat()`, `PetscViewerASCIIOpen()`, `PetscViewerDrawOpen()`,
938           `PetscViewerSocketOpen()`, `PetscViewerBinaryOpen()`, `MatLoad()`
939 @*/
940 PetscErrorCode MatView(Mat mat,PetscViewer viewer)
941 {
942   PetscInt          rows,cols,rbs,cbs;
943   PetscBool         isascii,isstring,issaws;
944   PetscViewerFormat format;
945   PetscMPIInt       size;
946 
947   PetscFunctionBegin;
948   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
949   PetscValidType(mat,1);
950   if (!viewer) PetscCall(PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)mat),&viewer));
951   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
952   PetscCheckSameComm(mat,1,viewer,2);
953   MatCheckPreallocated(mat,1);
954 
955   PetscCall(PetscViewerGetFormat(viewer,&format));
956   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
957   if (size == 1 && format == PETSC_VIEWER_LOAD_BALANCE) PetscFunctionReturn(0);
958 
959   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring));
960   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
961   PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSAWS,&issaws));
962   if ((!isascii || (format != PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL)) && mat->factortype) {
963     SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"No viewers for factored matrix except ASCII info or info_detail");
964   }
965 
966   PetscCall(PetscLogEventBegin(MAT_View,mat,viewer,0,0));
967   if (isascii) {
968     PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ORDER,"Must call MatAssemblyBegin/End() before viewing matrix");
969     PetscCall(PetscObjectPrintClassNamePrefixType((PetscObject)mat,viewer));
970     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
971       MatNullSpace nullsp,transnullsp;
972 
973       PetscCall(PetscViewerASCIIPushTab(viewer));
974       PetscCall(MatGetSize(mat,&rows,&cols));
975       PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
976       if (rbs != 1 || cbs != 1) {
977         if (rbs != cbs) PetscCall(PetscViewerASCIIPrintf(viewer,"rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", rbs=%" PetscInt_FMT ", cbs=%" PetscInt_FMT "\n",rows,cols,rbs,cbs));
978         else            PetscCall(PetscViewerASCIIPrintf(viewer,"rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT ", bs=%" PetscInt_FMT "\n",rows,cols,rbs));
979       } else PetscCall(PetscViewerASCIIPrintf(viewer,"rows=%" PetscInt_FMT ", cols=%" PetscInt_FMT "\n",rows,cols));
980       if (mat->factortype) {
981         MatSolverType solver;
982         PetscCall(MatFactorGetSolverType(mat,&solver));
983         PetscCall(PetscViewerASCIIPrintf(viewer,"package used to perform factorization: %s\n",solver));
984       }
985       if (mat->ops->getinfo) {
986         MatInfo info;
987         PetscCall(MatGetInfo(mat,MAT_GLOBAL_SUM,&info));
988         PetscCall(PetscViewerASCIIPrintf(viewer,"total: nonzeros=%.f, allocated nonzeros=%.f\n",info.nz_used,info.nz_allocated));
989         if (!mat->factortype) PetscCall(PetscViewerASCIIPrintf(viewer,"total number of mallocs used during MatSetValues calls=%" PetscInt_FMT "\n",(PetscInt)info.mallocs));
990       }
991       PetscCall(MatGetNullSpace(mat,&nullsp));
992       PetscCall(MatGetTransposeNullSpace(mat,&transnullsp));
993       if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer,"  has attached null space\n"));
994       if (transnullsp && transnullsp != nullsp) PetscCall(PetscViewerASCIIPrintf(viewer,"  has attached transposed null space\n"));
995       PetscCall(MatGetNearNullSpace(mat,&nullsp));
996       if (nullsp) PetscCall(PetscViewerASCIIPrintf(viewer,"  has attached near null space\n"));
997       PetscCall(PetscViewerASCIIPushTab(viewer));
998       PetscCall(MatProductView(mat,viewer));
999       PetscCall(PetscViewerASCIIPopTab(viewer));
1000     }
1001   } else if (issaws) {
1002 #if defined(PETSC_HAVE_SAWS)
1003     PetscMPIInt rank;
1004 
1005     PetscCall(PetscObjectName((PetscObject)mat));
1006     PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
1007     if (!((PetscObject)mat)->amsmem && rank == 0) {
1008       PetscCall(PetscObjectViewSAWs((PetscObject)mat,viewer));
1009     }
1010 #endif
1011   } else if (isstring) {
1012     const char *type;
1013     PetscCall(MatGetType(mat,&type));
1014     PetscCall(PetscViewerStringSPrintf(viewer," MatType: %-7.7s",type));
1015     if (mat->ops->view) PetscCall((*mat->ops->view)(mat,viewer));
1016   }
1017   if ((format == PETSC_VIEWER_NATIVE || format == PETSC_VIEWER_LOAD_BALANCE) && mat->ops->viewnative) {
1018     PetscCall(PetscViewerASCIIPushTab(viewer));
1019     PetscCall((*mat->ops->viewnative)(mat,viewer));
1020     PetscCall(PetscViewerASCIIPopTab(viewer));
1021   } else if (mat->ops->view) {
1022     PetscCall(PetscViewerASCIIPushTab(viewer));
1023     PetscCall((*mat->ops->view)(mat,viewer));
1024     PetscCall(PetscViewerASCIIPopTab(viewer));
1025   }
1026   if (isascii) {
1027     PetscCall(PetscViewerGetFormat(viewer,&format));
1028     if (format == PETSC_VIEWER_ASCII_INFO || format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
1029       PetscCall(PetscViewerASCIIPopTab(viewer));
1030     }
1031   }
1032   PetscCall(PetscLogEventEnd(MAT_View,mat,viewer,0,0));
1033   PetscFunctionReturn(0);
1034 }
1035 
1036 #if defined(PETSC_USE_DEBUG)
1037 #include <../src/sys/totalview/tv_data_display.h>
/* TotalView debugger display hook: registers one display row per matrix property
   so "print mat" in TotalView shows sizes and type instead of raw struct fields. */
PETSC_UNUSED static int TV_display_type(const struct _p_Mat *mat)
{
  TV_add_row("Local rows", "int", &mat->rmap->n);
  TV_add_row("Local columns", "int", &mat->cmap->n);
  TV_add_row("Global rows", "int", &mat->rmap->N);
  TV_add_row("Global columns", "int", &mat->cmap->N);
  TV_add_row("Typename", TV_ascii_string_type, ((PetscObject)mat)->type_name);
  return TV_format_OK;
}
1047 #endif
1048 
1049 /*@C
1050    MatLoad - Loads a matrix that has been stored in binary/HDF5 format
1051    with MatView().  The matrix format is determined from the options database.
1052    Generates a parallel MPI matrix if the communicator has more than one
1053    processor.  The default matrix type is AIJ.
1054 
1055    Collective on PetscViewer
1056 
1057    Input Parameters:
1058 +  mat - the newly loaded matrix, this needs to have been created with MatCreate()
1059             or some related function before a call to MatLoad()
1060 -  viewer - binary/HDF5 file viewer
1061 
1062    Options Database Keys:
1063    Used with block matrix formats (MATSEQBAIJ,  ...) to specify
1064    block size
1065 .    -matload_block_size <bs> - set block size
1066 
1067    Level: beginner
1068 
1069    Notes:
1070    If the Mat type has not yet been given then MATAIJ is used, call MatSetFromOptions() on the
1071    Mat before calling this routine if you wish to set it from the options database.
1072 
1073    MatLoad() automatically loads into the options database any options
1074    given in the file filename.info where filename is the name of the file
1075    that was passed to the PetscViewerBinaryOpen(). The options in the info
1076    file will be ignored if you use the -viewer_binary_skip_info option.
1077 
1078    If the type or size of mat is not set before a call to MatLoad, PETSc
1079    sets the default matrix type AIJ and sets the local and global sizes.
1080    If type and/or size is already set, then the same are used.
1081 
1082    In parallel, each processor can load a subset of rows (or the
1083    entire matrix).  This routine is especially useful when a large
1084    matrix is stored on disk and only part of it is desired on each
1085    processor.  For example, a parallel solver may access only some of
1086    the rows from each processor.  The algorithm used here reads
1087    relatively small blocks of data rather than reading the entire
1088    matrix and then subsetting it.
1089 
1090    Viewer's PetscViewerType must be either PETSCVIEWERBINARY or PETSCVIEWERHDF5.
1091    Such viewer can be created using PetscViewerBinaryOpen()/PetscViewerHDF5Open(),
1092    or the sequence like
1093 $    PetscViewer v;
1094 $    PetscViewerCreate(PETSC_COMM_WORLD,&v);
1095 $    PetscViewerSetType(v,PETSCVIEWERBINARY);
1096 $    PetscViewerSetFromOptions(v);
1097 $    PetscViewerFileSetMode(v,FILE_MODE_READ);
1098 $    PetscViewerFileSetName(v,"datafile");
   The optional PetscViewerSetFromOptions() call allows one to override PetscViewerSetType() using the option
1100 $ -viewer_type {binary,hdf5}
1101 
1102    See the example src/ksp/ksp/tutorials/ex27.c with the first approach,
1103    and src/mat/tutorials/ex10.c with the second approach.
1104 
1105    Notes about the PETSc binary format:
1106    In case of PETSCVIEWERBINARY, a native PETSc binary format is used. Each of the blocks
1107    is read onto rank 0 and then shipped to its destination rank, one after another.
1108    Multiple objects, both matrices and vectors, can be stored within the same file.
1109    Their PetscObject name is ignored; they are loaded in the order of their storage.
1110 
1111    Most users should not need to know the details of the binary storage
1112    format, since MatLoad() and MatView() completely hide these details.
1113    But for anyone who's interested, the standard binary matrix storage
1114    format is
1115 
1116 $    PetscInt    MAT_FILE_CLASSID
1117 $    PetscInt    number of rows
1118 $    PetscInt    number of columns
1119 $    PetscInt    total number of nonzeros
1120 $    PetscInt    *number nonzeros in each row
1121 $    PetscInt    *column indices of all nonzeros (starting index is zero)
1122 $    PetscScalar *values of all nonzeros
1123 
1124    PETSc automatically does the byte swapping for
1125 machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
1126 Linux, Microsoft Windows and the Intel Paragon; thus if you write your own binary
1127 read/write routines you have to swap the bytes; see PetscBinaryRead()
1128 and PetscBinaryWrite() to see how this may be done.
1129 
1130    Notes about the HDF5 (MATLAB MAT-File Version 7.3) format:
1131    In case of PETSCVIEWERHDF5, a parallel HDF5 reader is used.
1132    Each processor's chunk is loaded independently by its owning rank.
1133    Multiple objects, both matrices and vectors, can be stored within the same file.
1134    They are looked up by their PetscObject name.
1135 
1136    As the MATLAB MAT-File Version 7.3 format is also a HDF5 flavor, we decided to use
1137    by default the same structure and naming of the AIJ arrays and column count
1138    within the HDF5 file. This means that a MAT file saved with -v7.3 flag, e.g.
1139 $    save example.mat A b -v7.3
1140    can be directly read by this routine (see Reference 1 for details).
1141    Note that depending on your MATLAB version, this format might be a default,
1142    otherwise you can set it as default in Preferences.
1143 
1144    Unless -nocompression flag is used to save the file in MATLAB,
1145    PETSc must be configured with ZLIB package.
1146 
1147    See also examples src/mat/tutorials/ex10.c and src/ksp/ksp/tutorials/ex27.c
1148 
1149    Current HDF5 (MAT-File) limitations:
1150    This reader currently supports only real MATSEQAIJ, MATMPIAIJ, MATSEQDENSE and MATMPIDENSE matrices.
1151 
1152    Corresponding MatView() is not yet implemented.
1153 
1154    The loaded matrix is actually a transpose of the original one in MATLAB,
1155    unless you push PETSC_VIEWER_HDF5_MAT format (see examples above).
1156    With this format, matrix is automatically transposed by PETSc,
1157    unless the matrix is marked as SPD or symmetric
1158    (see MatSetOption(), MAT_SPD, MAT_SYMMETRIC).
1159 
1160    References:
1161 .  * - MATLAB(R) Documentation, manual page of save(), https://www.mathworks.com/help/matlab/ref/save.html#btox10b-1-version
1162 
1163 .seealso: `PetscViewerBinaryOpen()`, `PetscViewerSetType()`, `MatView()`, `VecLoad()`
1164 
1165  @*/
1166 PetscErrorCode MatLoad(Mat mat,PetscViewer viewer)
1167 {
1168   PetscBool flg;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1172   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
1173 
1174   if (!((PetscObject)mat)->type_name) PetscCall(MatSetType(mat,MATAIJ));
1175 
1176   flg  = PETSC_FALSE;
1177   PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options,((PetscObject)mat)->prefix,"-matload_symmetric",&flg,NULL));
1178   if (flg) {
1179     PetscCall(MatSetOption(mat,MAT_SYMMETRIC,PETSC_TRUE));
1180     PetscCall(MatSetOption(mat,MAT_SYMMETRY_ETERNAL,PETSC_TRUE));
1181   }
1182   flg  = PETSC_FALSE;
1183   PetscCall(PetscOptionsGetBool(((PetscObject)mat)->options,((PetscObject)mat)->prefix,"-matload_spd",&flg,NULL));
1184   if (flg) PetscCall(MatSetOption(mat,MAT_SPD,PETSC_TRUE));
1185 
1186   PetscCheck(mat->ops->load,PETSC_COMM_SELF,PETSC_ERR_SUP,"MatLoad is not supported for type %s",((PetscObject)mat)->type_name);
1187   PetscCall(PetscLogEventBegin(MAT_Load,mat,viewer,0,0));
1188   PetscCall((*mat->ops->load)(mat,viewer));
1189   PetscCall(PetscLogEventEnd(MAT_Load,mat,viewer,0,0));
1190   PetscFunctionReturn(0);
1191 }
1192 
1193 static PetscErrorCode MatDestroy_Redundant(Mat_Redundant **redundant)
1194 {
1195   Mat_Redundant *redund = *redundant;
1196 
1197   PetscFunctionBegin;
1198   if (redund) {
1199     if (redund->matseq) { /* via MatCreateSubMatrices()  */
1200       PetscCall(ISDestroy(&redund->isrow));
1201       PetscCall(ISDestroy(&redund->iscol));
1202       PetscCall(MatDestroySubMatrices(1,&redund->matseq));
1203     } else {
1204       PetscCall(PetscFree2(redund->send_rank,redund->recv_rank));
1205       PetscCall(PetscFree(redund->sbuf_j));
1206       PetscCall(PetscFree(redund->sbuf_a));
1207       for (PetscInt i=0; i<redund->nrecvs; i++) {
1208         PetscCall(PetscFree(redund->rbuf_j[i]));
1209         PetscCall(PetscFree(redund->rbuf_a[i]));
1210       }
1211       PetscCall(PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a));
1212     }
1213 
1214     if (redund->subcomm) PetscCall(PetscCommDestroy(&redund->subcomm));
1215     PetscCall(PetscFree(redund));
1216   }
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 /*@C
1221    MatDestroy - Frees space taken by a matrix.
1222 
1223    Collective on Mat
1224 
1225    Input Parameter:
1226 .  A - the matrix
1227 
1228    Level: beginner
1229 
1230 @*/
PetscErrorCode MatDestroy(Mat *A)
{
  PetscFunctionBegin;
  if (!*A) PetscFunctionReturn(0);
  PetscValidHeaderSpecific(*A,MAT_CLASSID,1);
  /* only the last reference actually frees the matrix; earlier calls just NULL the caller's handle */
  if (--((PetscObject)(*A))->refct > 0) {*A = NULL; PetscFunctionReturn(0);}

  /* if memory was published with SAWs then destroy it */
  PetscCall(PetscObjectSAWsViewOff((PetscObject)*A));
  /* type-specific teardown first, then the members shared by all Mat types */
  if ((*A)->ops->destroy) PetscCall((*(*A)->ops->destroy)(*A));

  PetscCall(PetscFree((*A)->defaultvectype));
  PetscCall(PetscFree((*A)->bsizes));
  PetscCall(PetscFree((*A)->solvertype));
  for (PetscInt i=0; i<MAT_FACTOR_NUM_TYPES; i++) PetscCall(PetscFree((*A)->preferredordering[i]));
  PetscCall(MatDestroy_Redundant(&(*A)->redundant));
  PetscCall(MatProductClear(*A));
  PetscCall(MatNullSpaceDestroy(&(*A)->nullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->transnullsp));
  PetscCall(MatNullSpaceDestroy(&(*A)->nearnullsp));
  PetscCall(MatDestroy(&(*A)->schur));
  /* layouts last: earlier destructors may still consult the maps */
  PetscCall(PetscLayoutDestroy(&(*A)->rmap));
  PetscCall(PetscLayoutDestroy(&(*A)->cmap));
  PetscCall(PetscHeaderDestroy(A));
  PetscFunctionReturn(0);
}
1257 
1258 /*@C
1259    MatSetValues - Inserts or adds a block of values into a matrix.
1260    These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
1261    MUST be called after all calls to MatSetValues() have been completed.
1262 
1263    Not Collective
1264 
1265    Input Parameters:
1266 +  mat - the matrix
1267 .  v - a logically two-dimensional array of values
1268 .  m, idxm - the number of rows and their global indices
1269 .  n, idxn - the number of columns and their global indices
1270 -  addv - either ADD_VALUES or INSERT_VALUES, where
1271    ADD_VALUES adds values to any existing entries, and
1272    INSERT_VALUES replaces existing entries with new values
1273 
1274    Notes:
1275    If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
1276       MatSetUp() before using this routine
1277 
1278    By default the values, v, are row-oriented. See MatSetOption() for other options.
1279 
1280    Calls to MatSetValues() with the INSERT_VALUES and ADD_VALUES
1281    options cannot be mixed without intervening calls to the assembly
1282    routines.
1283 
1284    MatSetValues() uses 0-based row and column numbers in Fortran
1285    as well as in C.
1286 
1287    Negative indices may be passed in idxm and idxn, these rows and columns are
1288    simply ignored. This allows easily inserting element stiffness matrices
   with homogeneous Dirichlet boundary conditions that you don't want represented
1290    in the matrix.
1291 
1292    Efficiency Alert:
1293    The routine MatSetValuesBlocked() may offer much better efficiency
1294    for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1295 
1296    Level: beginner
1297 
1298    Developer Notes:
1299     This is labeled with C so does not automatically generate Fortran stubs and interfaces
1300                     because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1301 
1302 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1303           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
1304 @*/
PetscErrorCode MatSetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
{
  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
  PetscValidIntPointer(idxm,3);
  PetscValidIntPointer(idxn,5);
  MatCheckPreallocated(mat,1);

  /* record the insert mode on first use; ADD_VALUES and INSERT_VALUES must not be
     mixed without an intervening assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");

  if (PetscDefined(USE_DEBUG)) {
    PetscInt       i,j;

    PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
    PetscCheck(mat->ops->setvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);

    /* reject Inf/NaN entries, but only when mat->erroriffailure is set */
    for (i=0; i<m; i++) {
      for (j=0; j<n; j++) {
        if (mat->erroriffailure && PetscIsInfOrNanScalar(v[i*n+j]))
#if defined(PETSC_USE_COMPLEX)
          SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g+i%g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")",(double)PetscRealPart(v[i*n+j]),(double)PetscImaginaryPart(v[i*n+j]),idxm[i],idxn[j]);
#else
          SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FP,"Inserting %g at matrix entry (%" PetscInt_FMT ",%" PetscInt_FMT ")",(double)v[i*n+j],idxm[i],idxn[j]);
#endif
      }
    }
    /* only the upper bound is checked: negative indices are legal and mean "ignore this row/column" */
    for (i=0; i<m; i++) PetscCheck(idxm[i] < mat->rmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot insert in row %" PetscInt_FMT ", maximum is %" PetscInt_FMT,idxm[i],mat->rmap->N-1);
    for (i=0; i<n; i++) PetscCheck(idxn[i] < mat->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot insert in column %" PetscInt_FMT ", maximum is %" PetscInt_FMT,idxn[i],mat->cmap->N-1);
  }

  /* inserting new values invalidates a previous assembly */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues,mat,0,0,0));
  PetscCall((*mat->ops->setvalues)(mat,m,idxm,n,idxn,v,addv));
  PetscCall(PetscLogEventEnd(MAT_SetValues,mat,0,0,0));
  PetscFunctionReturn(0);
}
1347 
1348 /*@C
1349    MatSetValuesIS - Inserts or adds a block of values into a matrix using IS to indicate the rows and columns
1350    These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
1351    MUST be called after all calls to MatSetValues() have been completed.
1352 
1353    Not Collective
1354 
1355    Input Parameters:
1356 +  mat - the matrix
1357 .  v - a logically two-dimensional array of values
1358 .  ism - the rows to provide
1359 .  isn - the columns to provide
1360 -  addv - either ADD_VALUES or INSERT_VALUES, where
1361    ADD_VALUES adds values to any existing entries, and
1362    INSERT_VALUES replaces existing entries with new values
1363 
1364    Notes:
1365    If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
1366       MatSetUp() before using this routine
1367 
1368    By default the values, v, are row-oriented. See MatSetOption() for other options.
1369 
1370    Calls to MatSetValues() with the INSERT_VALUES and ADD_VALUES
1371    options cannot be mixed without intervening calls to the assembly
1372    routines.
1373 
1374    MatSetValues() uses 0-based row and column numbers in Fortran
1375    as well as in C.
1376 
1377    Negative indices may be passed in ism and isn, these rows and columns are
1378    simply ignored. This allows easily inserting element stiffness matrices
   with homogeneous Dirichlet boundary conditions that you don't want represented
1380    in the matrix.
1381 
1382    Efficiency Alert:
1383    The routine MatSetValuesBlocked() may offer much better efficiency
1384    for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1385 
1386    Level: beginner
1387 
1388    Developer Notes:
1389     This is labeled with C so does not automatically generate Fortran stubs and interfaces
1390                     because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1391 
1392     This is currently not optimized for any particular IS type
1393 
1394 .seealso: MatSetOption(), MatAssemblyBegin(), MatAssemblyEnd(), MatSetValuesBlocked(), MatSetValuesLocal(),
1395           InsertMode, INSERT_VALUES, ADD_VALUES, MatSetValues()
1396 @*/
1397 PetscErrorCode MatSetValuesIS(Mat mat,IS ism,IS isn,const PetscScalar v[],InsertMode addv)
1398 {
1399   PetscInt       m,n;
1400   const PetscInt *rows,*cols;
1401 
1402   PetscFunctionBeginHot;
1403   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1404   PetscCall(ISGetIndices(ism,&rows));
1405   PetscCall(ISGetIndices(isn,&cols));
1406   PetscCall(ISGetLocalSize(ism,&m));
1407   PetscCall(ISGetLocalSize(isn,&n));
1408   PetscCall(MatSetValues(mat,m,rows,n,cols,v,addv));
1409   PetscCall(ISRestoreIndices(ism,&rows));
1410   PetscCall(ISRestoreIndices(isn,&cols));
1411   PetscFunctionReturn(0);
1412 }
1413 
1414 /*@
1415    MatSetValuesRowLocal - Inserts a row (block row for BAIJ matrices) of nonzero
1416         values into a matrix
1417 
1418    Not Collective
1419 
1420    Input Parameters:
1421 +  mat - the matrix
1422 .  row - the (block) row to set
1423 -  v - a logically two-dimensional array of values
1424 
1425    Notes:
   By default the values, v, are column-oriented (for the block version) and sorted
1427 
1428    All the nonzeros in the row must be provided
1429 
1430    The matrix must have previously had its column indices set
1431 
1432    The row must belong to this process
1433 
1434    Level: intermediate
1435 
1436 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1437           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`, `MatSetValuesRow()`, `MatSetLocalToGlobalMapping()`
1438 @*/
1439 PetscErrorCode MatSetValuesRowLocal(Mat mat,PetscInt row,const PetscScalar v[])
1440 {
1441   PetscInt globalrow;
1442 
1443   PetscFunctionBegin;
1444   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1445   PetscValidType(mat,1);
1446   PetscValidScalarPointer(v,3);
1447   PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping,1,&row,&globalrow));
1448   PetscCall(MatSetValuesRow(mat,globalrow,v));
1449   PetscFunctionReturn(0);
1450 }
1451 
1452 /*@
1453    MatSetValuesRow - Inserts a row (block row for BAIJ matrices) of nonzero
1454         values into a matrix
1455 
1456    Not Collective
1457 
1458    Input Parameters:
1459 +  mat - the matrix
1460 .  row - the (block) row to set
1461 -  v - a logically two-dimensional (column major) array of values for  block matrices with blocksize larger than one, otherwise a one dimensional array of values
1462 
1463    Notes:
1464    The values, v, are column-oriented for the block version.
1465 
1466    All the nonzeros in the row must be provided
1467 
1468    THE MATRIX MUST HAVE PREVIOUSLY HAD ITS COLUMN INDICES SET. IT IS RARE THAT THIS ROUTINE IS USED, usually MatSetValues() is used.
1469 
1470    The row must belong to this process
1471 
1472    Level: advanced
1473 
1474 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
1475           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`
1476 @*/
PetscErrorCode MatSetValuesRow(Mat mat,PetscInt row,const PetscScalar v[])
{
  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  MatCheckPreallocated(mat,1);
  PetscValidScalarPointer(v,3);
  /* whole-row insertion always uses INSERT_VALUES; it cannot be mixed with cached ADD_VALUES */
  PetscCheck(mat->insertmode != ADD_VALUES,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add and insert values");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  mat->insertmode = INSERT_VALUES;

  /* inserting new values invalidates a previous assembly */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues,mat,0,0,0));
  PetscCheck(mat->ops->setvaluesrow,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall((*mat->ops->setvaluesrow)(mat,row,v));
  PetscCall(PetscLogEventEnd(MAT_SetValues,mat,0,0,0));
  PetscFunctionReturn(0);
}
1498 
1499 /*@
1500    MatSetValuesStencil - Inserts or adds a block of values into a matrix.
1501      Using structured grid indexing
1502 
1503    Not Collective
1504 
1505    Input Parameters:
1506 +  mat - the matrix
1507 .  m - number of rows being entered
1508 .  idxm - grid coordinates (and component number when dof > 1) for matrix rows being entered
1509 .  n - number of columns being entered
1510 .  idxn - grid coordinates (and component number when dof > 1) for matrix columns being entered
1511 .  v - a logically two-dimensional array of values
1512 -  addv - either ADD_VALUES or INSERT_VALUES, where
1513    ADD_VALUES adds values to any existing entries, and
1514    INSERT_VALUES replaces existing entries with new values
1515 
1516    Notes:
1517    By default the values, v, are row-oriented.  See MatSetOption() for other options.
1518 
1519    Calls to MatSetValuesStencil() with the INSERT_VALUES and ADD_VALUES
1520    options cannot be mixed without intervening calls to the assembly
1521    routines.
1522 
1523    The grid coordinates are across the entire grid, not just the local portion
1524 
1525    MatSetValuesStencil() uses 0-based row and column numbers in Fortran
1526    as well as in C.
1527 
1528    For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1529 
1530    In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1531    or call MatSetLocalToGlobalMapping() and MatSetStencil() first.
1532 
1533    The columns and rows in the stencil passed in MUST be contained within the
1534    ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1535    if you create a DMDA with an overlap of one grid level and on a particular process its first
1536    local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1537    first i index you can use in your column and row indices in MatSetStencil() is 5.
1538 
1539    In Fortran idxm and idxn should be declared as
1540 $     MatStencil idxm(4,m),idxn(4,n)
1541    and the values inserted using
1542 $    idxm(MatStencil_i,1) = i
1543 $    idxm(MatStencil_j,1) = j
1544 $    idxm(MatStencil_k,1) = k
1545 $    idxm(MatStencil_c,1) = c
1546    etc
1547 
1548    For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
1549    obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
1550    etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
1551    DM_BOUNDARY_PERIODIC boundary type.
1552 
1553    For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
1554    a single value per point) you can skip filling those indices.
1555 
1556    Inspired by the structured grid interface to the HYPRE package
1557    (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1558 
1559    Efficiency Alert:
1560    The routine MatSetValuesBlockedStencil() may offer much better efficiency
1561    for users of block sparse formats (MATSEQBAIJ and MATMPIBAIJ).
1562 
1563    Level: beginner
1564 
1565 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1566           `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`
1567 @*/
PetscErrorCode MatSetValuesStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
{
  /* scratch space for the translated (local linear) row/column indices; spills to the heap when m+n is large */
  PetscInt       buf[8192],*bufm=NULL,*bufn=NULL,*jdxm,*jdxn;
  PetscInt       j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
  /* each MatStencil is laid out as 4 PetscInts (k,j,i,c); dxm/dxn walk those raw slots.
     sdim is the number of spatial dimensions of the grid (dim minus the dof dimension, if any) */
  PetscInt       *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(idxm,3);
  PetscValidPointer(idxn,5);

  if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
    jdxm = buf; jdxn = buf+m;
  } else {
    PetscCall(PetscMalloc2(m,&bufm,n,&bufn));
    jdxm = bufm; jdxn = bufn;
  }
  /* translate each row stencil coordinate into a local linear index */
  for (i=0; i<m; i++) {
    for (j=0; j<3-sdim; j++) dxm++; /* skip the unused leading slots (e.g. k when working in 2d) */
    tmp = *dxm++ - starts[0];
    for (j=0; j<dim-1; j++) {
      /* any coordinate left of the ghosted patch poisons the index to -1 (ignored by MatSetValuesLocal) */
      if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
      else                                       tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
    }
    if (mat->stencil.noc) dxm++; /* single dof: the c slot is unused, step over it */
    jdxm[i] = tmp;
  }
  /* same translation for the column stencil coordinates */
  for (i=0; i<n; i++) {
    for (j=0; j<3-sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j=0; j<dim-1; j++) {
      if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
      else                                       tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
    }
    if (mat->stencil.noc) dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesLocal(mat,m,jdxm,n,jdxn,v,addv));
  PetscCall(PetscFree2(bufm,bufn)); /* no-op when the stack buffer was used (bufm/bufn stayed NULL) */
  PetscFunctionReturn(0);
}
1611 
1612 /*@
1613    MatSetValuesBlockedStencil - Inserts or adds a block of values into a matrix.
1614      Using structured grid indexing
1615 
1616    Not Collective
1617 
1618    Input Parameters:
1619 +  mat - the matrix
1620 .  m - number of rows being entered
1621 .  idxm - grid coordinates for matrix rows being entered
1622 .  n - number of columns being entered
1623 .  idxn - grid coordinates for matrix columns being entered
1624 .  v - a logically two-dimensional array of values
1625 -  addv - either ADD_VALUES or INSERT_VALUES, where
1626    ADD_VALUES adds values to any existing entries, and
1627    INSERT_VALUES replaces existing entries with new values
1628 
1629    Notes:
1630    By default the values, v, are row-oriented and unsorted.
1631    See MatSetOption() for other options.
1632 
1633    Calls to MatSetValuesBlockedStencil() with the INSERT_VALUES and ADD_VALUES
1634    options cannot be mixed without intervening calls to the assembly
1635    routines.
1636 
1637    The grid coordinates are across the entire grid, not just the local portion
1638 
1639    MatSetValuesBlockedStencil() uses 0-based row and column numbers in Fortran
1640    as well as in C.
1641 
1642    For setting/accessing vector values via array coordinates you can use the DMDAVecGetArray() routine
1643 
1644    In order to use this routine you must either obtain the matrix with DMCreateMatrix()
1645    or call MatSetBlockSize(), MatSetLocalToGlobalMapping() and MatSetStencil() first.
1646 
1647    The columns and rows in the stencil passed in MUST be contained within the
1648    ghost region of the given process as set with DMDACreateXXX() or MatSetStencil(). For example,
1649    if you create a DMDA with an overlap of one grid level and on a particular process its first
1650    local nonghost x logical coordinate is 6 (so its first ghost x logical coordinate is 5) the
1651    first i index you can use in your column and row indices in MatSetStencil() is 5.
1652 
1653    In Fortran idxm and idxn should be declared as
1654 $     MatStencil idxm(4,m),idxn(4,n)
1655    and the values inserted using
1656 $    idxm(MatStencil_i,1) = i
1657 $    idxm(MatStencil_j,1) = j
1658 $    idxm(MatStencil_k,1) = k
1659    etc
1660 
1661    Negative indices may be passed in idxm and idxn, these rows and columns are
1662    simply ignored. This allows easily inserting element stiffness matrices
   with homogeneous Dirichlet boundary conditions that you don't want represented
1664    in the matrix.
1665 
1666    Inspired by the structured grid interface to the HYPRE package
1667    (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1668 
1669    Level: beginner
1670 
1671 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1672           `MatSetValues()`, `MatSetValuesStencil()`, `MatSetStencil()`, `DMCreateMatrix()`, `DMDAVecGetArray()`, `MatStencil`,
1673           `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`
1674 @*/
PetscErrorCode MatSetValuesBlockedStencil(Mat mat,PetscInt m,const MatStencil idxm[],PetscInt n,const MatStencil idxn[],const PetscScalar v[],InsertMode addv)
{
  /* scratch space for the translated local block indices; spills to the heap when m+n is large */
  PetscInt       buf[8192],*bufm=NULL,*bufn=NULL,*jdxm,*jdxn;
  PetscInt       j,i,dim = mat->stencil.dim,*dims = mat->stencil.dims+1,tmp;
  /* each MatStencil is laid out as 4 PetscInts (k,j,i,c); dxm/dxn walk those raw slots.
     sdim is the number of spatial dimensions of the grid (dim minus the dof dimension, if any) */
  PetscInt       *starts = mat->stencil.starts,*dxm = (PetscInt*)idxm,*dxn = (PetscInt*)idxn,sdim = dim - (1 - (PetscInt)mat->stencil.noc);

  PetscFunctionBegin;
  if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(idxm,3);
  PetscValidPointer(idxn,5);
  PetscValidScalarPointer(v,6);

  if ((m+n) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
    jdxm = buf; jdxn = buf+m;
  } else {
    PetscCall(PetscMalloc2(m,&bufm,n,&bufn));
    jdxm = bufm; jdxn = bufn;
  }
  /* translate each row stencil coordinate into a local linear block index */
  for (i=0; i<m; i++) {
    for (j=0; j<3-sdim; j++) dxm++; /* skip the unused leading slots (e.g. k when working in 2d) */
    tmp = *dxm++ - starts[0];
    for (j=0; j<sdim-1; j++) { /* only the spatial dims: the block carries the dof internally */
      /* a coordinate left of the ghosted patch poisons the index to -1 (ignored downstream) */
      if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
      else                                       tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
    }
    dxm++; /* the c slot is never used for blocked insertion; always step over it */
    jdxm[i] = tmp;
  }
  /* same translation for the column stencil coordinates */
  for (i=0; i<n; i++) {
    for (j=0; j<3-sdim; j++) dxn++;
    tmp = *dxn++ - starts[0];
    for (j=0; j<sdim-1; j++) {
      if ((*dxn++ - starts[j+1]) < 0 || tmp < 0) tmp = -1;
      else                                       tmp = tmp*dims[j] + *(dxn-1) - starts[j+1];
    }
    dxn++;
    jdxn[i] = tmp;
  }
  PetscCall(MatSetValuesBlockedLocal(mat,m,jdxm,n,jdxn,v,addv));
  PetscCall(PetscFree2(bufm,bufn)); /* no-op when the stack buffer was used (bufm/bufn stayed NULL) */
  PetscFunctionReturn(0);
}
1719 
1720 /*@
1721    MatSetStencil - Sets the grid information for setting values into a matrix via
1722         MatSetValuesStencil()
1723 
1724    Not Collective
1725 
1726    Input Parameters:
1727 +  mat - the matrix
1728 .  dim - dimension of the grid 1, 2, or 3
1729 .  dims - number of grid points in x, y, and z direction, including ghost points on your processor
1730 .  starts - starting point of ghost nodes on your processor in x, y, and z direction
1731 -  dof - number of degrees of freedom per node
1732 
   Inspired by the structured grid interface to the HYPRE package
   (https://computation.llnl.gov/projects/hypre-scalable-linear-solvers-multigrid-methods)
1735 
1736    For matrices generated with DMCreateMatrix() this routine is automatically called and so not needed by the
1737    user.
1738 
1739    Level: beginner
1740 
1741 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`
1742           `MatSetValues()`, `MatSetValuesBlockedStencil()`, `MatSetValuesStencil()`
1743 @*/
1744 PetscErrorCode MatSetStencil(Mat mat,PetscInt dim,const PetscInt dims[],const PetscInt starts[],PetscInt dof)
1745 {
1746   PetscFunctionBegin;
1747   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1748   PetscValidIntPointer(dims,3);
1749   PetscValidIntPointer(starts,4);
1750 
1751   mat->stencil.dim = dim + (dof > 1);
1752   for (PetscInt i=0; i<dim; i++) {
1753     mat->stencil.dims[i]   = dims[dim-i-1];      /* copy the values in backwards */
1754     mat->stencil.starts[i] = starts[dim-i-1];
1755   }
1756   mat->stencil.dims[dim]   = dof;
1757   mat->stencil.starts[dim] = 0;
1758   mat->stencil.noc         = (PetscBool)(dof == 1);
1759   PetscFunctionReturn(0);
1760 }
1761 
1762 /*@C
1763    MatSetValuesBlocked - Inserts or adds a block of values into a matrix.
1764 
1765    Not Collective
1766 
1767    Input Parameters:
1768 +  mat - the matrix
1769 .  v - a logically two-dimensional array of values
1770 .  m, idxm - the number of block rows and their global block indices
1771 .  n, idxn - the number of block columns and their global block indices
1772 -  addv - either ADD_VALUES or INSERT_VALUES, where
1773    ADD_VALUES adds values to any existing entries, and
1774    INSERT_VALUES replaces existing entries with new values
1775 
1776    Notes:
1777    If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call
1778    MatXXXXSetPreallocation() or MatSetUp() before using this routine.
1779 
1780    The m and n count the NUMBER of blocks in the row direction and column direction,
1781    NOT the total number of rows/columns; for example, if the block size is 2 and
1782    you are passing in values for rows 2,3,4,5  then m would be 2 (not 4).
1783    The values in idxm would be 1 2; that is the first index for each block divided by
1784    the block size.
1785 
1786    Note that you must call MatSetBlockSize() when constructing this matrix (before
1787    preallocating it).
1788 
1789    By default the values, v, are row-oriented, so the layout of
1790    v is the same as for MatSetValues(). See MatSetOption() for other options.
1791 
1792    Calls to MatSetValuesBlocked() with the INSERT_VALUES and ADD_VALUES
1793    options cannot be mixed without intervening calls to the assembly
1794    routines.
1795 
1796    MatSetValuesBlocked() uses 0-based row and column numbers in Fortran
1797    as well as in C.
1798 
1799    Negative indices may be passed in idxm and idxn, these rows and columns are
1800    simply ignored. This allows easily inserting element stiffness matrices
   with homogeneous Dirichlet boundary conditions that you don't want represented
1802    in the matrix.
1803 
1804    Each time an entry is set within a sparse matrix via MatSetValues(),
1805    internal searching must be done to determine where to place the
1806    data in the matrix storage space.  By instead inserting blocks of
1807    entries via MatSetValuesBlocked(), the overhead of matrix assembly is
1808    reduced.
1809 
1810    Example:
1811 $   Suppose m=n=2 and block size(bs) = 2 The array is
1812 $
1813 $   1  2  | 3  4
1814 $   5  6  | 7  8
1815 $   - - - | - - -
1816 $   9  10 | 11 12
1817 $   13 14 | 15 16
1818 $
1819 $   v[] should be passed in like
1820 $   v[] = [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]
1821 $
1822 $  If you are not using row oriented storage of v (that is you called MatSetOption(mat,MAT_ROW_ORIENTED,PETSC_FALSE)) then
1823 $   v[] = [1,5,9,13,2,6,10,14,3,7,11,15,4,8,12,16]
1824 
1825    Level: intermediate
1826 
1827 .seealso: `MatSetBlockSize()`, `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesBlockedLocal()`
1828 @*/
1829 PetscErrorCode MatSetValuesBlocked(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv)
1830 {
1831   PetscFunctionBeginHot;
1832   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1833   PetscValidType(mat,1);
1834   if (!m || !n) PetscFunctionReturn(0); /* no values to insert */
1835   PetscValidIntPointer(idxm,3);
1836   PetscValidIntPointer(idxn,5);
1837   MatCheckPreallocated(mat,1);
1838   if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
1839   else PetscCheck(mat->insertmode == addv,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
1840   if (PetscDefined(USE_DEBUG)) {
1841     PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1842     PetscCheck(mat->ops->setvaluesblocked || mat->ops->setvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1843   }
1844   if (PetscDefined(USE_DEBUG)) {
1845     PetscInt rbs,cbs,M,N,i;
1846     PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
1847     PetscCall(MatGetSize(mat,&M,&N));
1848     for (i=0; i<m; i++) PetscCheck(idxm[i]*rbs < M,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row block index %" PetscInt_FMT " (index %" PetscInt_FMT ") greater than row length %" PetscInt_FMT,i,idxm[i],M);
1849     for (i=0; i<n; i++) PetscCheck(idxn[i]*cbs < N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column block index %" PetscInt_FMT " (index %" PetscInt_FMT ") great than column length %" PetscInt_FMT,i,idxn[i],N);
1850   }
1851   if (mat->assembled) {
1852     mat->was_assembled = PETSC_TRUE;
1853     mat->assembled     = PETSC_FALSE;
1854   }
1855   PetscCall(PetscLogEventBegin(MAT_SetValues,mat,0,0,0));
1856   if (mat->ops->setvaluesblocked) {
1857     PetscCall((*mat->ops->setvaluesblocked)(mat,m,idxm,n,idxn,v,addv));
1858   } else {
1859     PetscInt buf[8192],*bufr=NULL,*bufc=NULL,*iidxm,*iidxn;
1860     PetscInt i,j,bs,cbs;
1861 
1862     PetscCall(MatGetBlockSizes(mat,&bs,&cbs));
1863     if (m*bs+n*cbs <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1864       iidxm = buf;
1865       iidxn = buf + m*bs;
1866     } else {
1867       PetscCall(PetscMalloc2(m*bs,&bufr,n*cbs,&bufc));
1868       iidxm = bufr;
1869       iidxn = bufc;
1870     }
1871     for (i=0; i<m; i++) {
1872       for (j=0; j<bs; j++) {
1873         iidxm[i*bs+j] = bs*idxm[i] + j;
1874       }
1875     }
1876     if (m != n || bs != cbs || idxm != idxn) {
1877       for (i=0; i<n; i++) {
1878         for (j=0; j<cbs; j++) {
1879           iidxn[i*cbs+j] = cbs*idxn[i] + j;
1880         }
1881       }
1882     } else iidxn = iidxm;
1883     PetscCall(MatSetValues(mat,m*bs,iidxm,n*cbs,iidxn,v,addv));
1884     PetscCall(PetscFree2(bufr,bufc));
1885   }
1886   PetscCall(PetscLogEventEnd(MAT_SetValues,mat,0,0,0));
1887   PetscFunctionReturn(0);
1888 }
1889 
1890 /*@C
1891    MatGetValues - Gets a block of values from a matrix.
1892 
   Not Collective; can only return values that are owned by the given process
1894 
1895    Input Parameters:
1896 +  mat - the matrix
1897 .  v - a logically two-dimensional array for storing the values
1898 .  m, idxm - the number of rows and their global indices
1899 -  n, idxn - the number of columns and their global indices
1900 
1901    Notes:
1902      The user must allocate space (m*n PetscScalars) for the values, v.
1903      The values, v, are then returned in a row-oriented format,
1904      analogous to that used by default in MatSetValues().
1905 
1906      MatGetValues() uses 0-based row and column numbers in
1907      Fortran as well as in C.
1908 
1909      MatGetValues() requires that the matrix has been assembled
1910      with MatAssemblyBegin()/MatAssemblyEnd().  Thus, calls to
1911      MatSetValues() and MatGetValues() CANNOT be made in succession
1912      without intermediate matrix assembly.
1913 
1914      Negative row or column indices will be ignored and those locations in v[] will be
1915      left unchanged.
1916 
1917      For the standard row-based matrix formats, idxm[] can only contain rows owned by the requesting MPI rank.
1918      That is, rows with global index greater than or equal to rstart and less than rend where rstart and rend are obtainable
1919      from MatGetOwnershipRange(mat,&rstart,&rend).
1920 
1921    Level: advanced
1922 
1923 .seealso: `MatGetRow()`, `MatCreateSubMatrices()`, `MatSetValues()`, `MatGetOwnershipRange()`, `MatGetValuesLocal()`, `MatGetValue()`
1924 @*/
1925 PetscErrorCode MatGetValues(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
1926 {
1927   PetscFunctionBegin;
1928   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1929   PetscValidType(mat,1);
1930   if (!m || !n) PetscFunctionReturn(0);
1931   PetscValidIntPointer(idxm,3);
1932   PetscValidIntPointer(idxn,5);
1933   PetscValidScalarPointer(v,6);
1934   PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
1935   PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1936   PetscCheck(mat->ops->getvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1937   MatCheckPreallocated(mat,1);
1938 
1939   PetscCall(PetscLogEventBegin(MAT_GetValues,mat,0,0,0));
1940   PetscCall((*mat->ops->getvalues)(mat,m,idxm,n,idxn,v));
1941   PetscCall(PetscLogEventEnd(MAT_GetValues,mat,0,0,0));
1942   PetscFunctionReturn(0);
1943 }
1944 
1945 /*@C
1946    MatGetValuesLocal - retrieves values from certain locations in a matrix using the local numbering of the indices
1947      defined previously by MatSetLocalToGlobalMapping()
1948 
1949    Not Collective
1950 
1951    Input Parameters:
1952 +  mat - the matrix
1953 .  nrow, irow - number of rows and their local indices
1954 -  ncol, icol - number of columns and their local indices
1955 
1956    Output Parameter:
1957 .  y -  a logically two-dimensional array of values
1958 
1959    Notes:
1960      If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatSetLocalToGlobalMapping() before using this routine.
1961 
1962      This routine can only return values that are owned by the requesting MPI rank. That is, for standard matrix formats, rows that, in the global numbering,
1963      are greater than or equal to rstart and less than rend where rstart and rend are obtainable from MatGetOwnershipRange(mat,&rstart,&rend). One can
1964      determine if the resulting global row associated with the local row r is owned by the requesting MPI rank by applying the ISLocalToGlobalMapping set
1965      with MatSetLocalToGlobalMapping().
1966 
1967    Developer Notes:
1968       This is labelled with C so does not automatically generate Fortran stubs and interfaces
1969       because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
1970 
1971    Level: advanced
1972 
1973 .seealso: `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
1974           `MatSetValuesLocal()`, `MatGetValues()`
1975 @*/
1976 PetscErrorCode MatGetValuesLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],PetscScalar y[])
1977 {
1978   PetscFunctionBeginHot;
1979   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
1980   PetscValidType(mat,1);
1981   MatCheckPreallocated(mat,1);
1982   if (!nrow || !ncol) PetscFunctionReturn(0); /* no values to retrieve */
1983   PetscValidIntPointer(irow,3);
1984   PetscValidIntPointer(icol,5);
1985   if (PetscDefined(USE_DEBUG)) {
1986     PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
1987     PetscCheck(mat->ops->getvalueslocal || mat->ops->getvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
1988   }
1989   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
1990   PetscCall(PetscLogEventBegin(MAT_GetValues,mat,0,0,0));
1991   if (mat->ops->getvalueslocal) {
1992     PetscCall((*mat->ops->getvalueslocal)(mat,nrow,irow,ncol,icol,y));
1993   } else {
1994     PetscInt buf[8192],*bufr=NULL,*bufc=NULL,*irowm,*icolm;
1995     if ((nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
1996       irowm = buf; icolm = buf+nrow;
1997     } else {
1998       PetscCall(PetscMalloc2(nrow,&bufr,ncol,&bufc));
1999       irowm = bufr; icolm = bufc;
2000     }
2001     PetscCheck(mat->rmap->mapping,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"MatGetValuesLocal() cannot proceed without local-to-global row mapping (See MatSetLocalToGlobalMapping()).");
2002     PetscCheck(mat->cmap->mapping,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"MatGetValuesLocal() cannot proceed without local-to-global column mapping (See MatSetLocalToGlobalMapping()).");
2003     PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping,nrow,irow,irowm));
2004     PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping,ncol,icol,icolm));
2005     PetscCall(MatGetValues(mat,nrow,irowm,ncol,icolm,y));
2006     PetscCall(PetscFree2(bufr,bufc));
2007   }
2008   PetscCall(PetscLogEventEnd(MAT_GetValues,mat,0,0,0));
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 /*@
2013   MatSetValuesBatch - Adds (ADD_VALUES) many blocks of values into a matrix at once. The blocks must all be square and
2014   the same size. Currently, this can only be called once and creates the given matrix.
2015 
2016   Not Collective
2017 
2018   Input Parameters:
2019 + mat - the matrix
2020 . nb - the number of blocks
2021 . bs - the number of rows (and columns) in each block
2022 . rows - a concatenation of the rows for each block
2023 - v - a concatenation of logically two-dimensional arrays of values
2024 
2025   Notes:
2026   In the future, we will extend this routine to handle rectangular blocks, and to allow multiple calls for a given matrix.
2027 
2028   Level: advanced
2029 
2030 .seealso: `MatSetOption()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValuesBlocked()`, `MatSetValuesLocal()`,
2031           `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`, `MatSetValues()`
2032 @*/
2033 PetscErrorCode MatSetValuesBatch(Mat mat, PetscInt nb, PetscInt bs, PetscInt rows[], const PetscScalar v[])
2034 {
2035   PetscFunctionBegin;
2036   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2037   PetscValidType(mat,1);
2038   PetscValidIntPointer(rows,4);
2039   PetscValidScalarPointer(v,5);
2040   PetscAssert(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2041 
2042   PetscCall(PetscLogEventBegin(MAT_SetValuesBatch,mat,0,0,0));
2043   if (mat->ops->setvaluesbatch) {
2044     PetscCall((*mat->ops->setvaluesbatch)(mat,nb,bs,rows,v));
2045   } else {
2046     for (PetscInt b = 0; b < nb; ++b) PetscCall(MatSetValues(mat, bs, &rows[b*bs], bs, &rows[b*bs], &v[b*bs*bs], ADD_VALUES));
2047   }
2048   PetscCall(PetscLogEventEnd(MAT_SetValuesBatch,mat,0,0,0));
2049   PetscFunctionReturn(0);
2050 }
2051 
2052 /*@
2053    MatSetLocalToGlobalMapping - Sets a local-to-global numbering for use by
2054    the routine MatSetValuesLocal() to allow users to insert matrix entries
2055    using a local (per-processor) numbering.
2056 
2057    Not Collective
2058 
2059    Input Parameters:
2060 +  x - the matrix
2061 .  rmapping - row mapping created with ISLocalToGlobalMappingCreate() or ISLocalToGlobalMappingCreateIS()
2062 -  cmapping - column mapping
2063 
2064    Level: intermediate
2065 
2066 .seealso: `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetValuesLocal()`, `MatGetValuesLocal()`
2067 @*/
2068 PetscErrorCode MatSetLocalToGlobalMapping(Mat x,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
2069 {
2070   PetscFunctionBegin;
2071   PetscValidHeaderSpecific(x,MAT_CLASSID,1);
2072   PetscValidType(x,1);
2073   if (rmapping) PetscValidHeaderSpecific(rmapping,IS_LTOGM_CLASSID,2);
2074   if (cmapping) PetscValidHeaderSpecific(cmapping,IS_LTOGM_CLASSID,3);
2075   if (x->ops->setlocaltoglobalmapping) {
2076     PetscCall((*x->ops->setlocaltoglobalmapping)(x,rmapping,cmapping));
2077   } else {
2078     PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->rmap,rmapping));
2079     PetscCall(PetscLayoutSetISLocalToGlobalMapping(x->cmap,cmapping));
2080   }
2081   PetscFunctionReturn(0);
2082 }
2083 
2084 /*@
2085    MatGetLocalToGlobalMapping - Gets the local-to-global numbering set by MatSetLocalToGlobalMapping()
2086 
2087    Not Collective
2088 
2089    Input Parameter:
2090 .  A - the matrix
2091 
2092    Output Parameters:
2093 + rmapping - row mapping
2094 - cmapping - column mapping
2095 
2096    Level: advanced
2097 
2098 .seealso: `MatSetValuesLocal()`
2099 @*/
2100 PetscErrorCode MatGetLocalToGlobalMapping(Mat A,ISLocalToGlobalMapping *rmapping,ISLocalToGlobalMapping *cmapping)
2101 {
2102   PetscFunctionBegin;
2103   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2104   PetscValidType(A,1);
2105   if (rmapping) {
2106     PetscValidPointer(rmapping,2);
2107     *rmapping = A->rmap->mapping;
2108   }
2109   if (cmapping) {
2110     PetscValidPointer(cmapping,3);
2111     *cmapping = A->cmap->mapping;
2112   }
2113   PetscFunctionReturn(0);
2114 }
2115 
2116 /*@
2117    MatSetLayouts - Sets the PetscLayout objects for rows and columns of a matrix
2118 
2119    Logically Collective on A
2120 
2121    Input Parameters:
2122 +  A - the matrix
2123 . rmap - row layout
2124 - cmap - column layout
2125 
2126    Level: advanced
2127 
2128 .seealso: `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatGetLayouts()`
2129 @*/
2130 PetscErrorCode MatSetLayouts(Mat A,PetscLayout rmap,PetscLayout cmap)
2131 {
2132   PetscFunctionBegin;
2133   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2134   PetscCall(PetscLayoutReference(rmap,&A->rmap));
2135   PetscCall(PetscLayoutReference(cmap,&A->cmap));
2136   PetscFunctionReturn(0);
2137 }
2138 
2139 /*@
2140    MatGetLayouts - Gets the PetscLayout objects for rows and columns
2141 
2142    Not Collective
2143 
2144    Input Parameter:
2145 .  A - the matrix
2146 
2147    Output Parameters:
2148 + rmap - row layout
2149 - cmap - column layout
2150 
2151    Level: advanced
2152 
2153 .seealso: `MatCreateVecs()`, `MatGetLocalToGlobalMapping()`, `MatSetLayouts()`
2154 @*/
2155 PetscErrorCode MatGetLayouts(Mat A,PetscLayout *rmap,PetscLayout *cmap)
2156 {
2157   PetscFunctionBegin;
2158   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
2159   PetscValidType(A,1);
2160   if (rmap) {
2161     PetscValidPointer(rmap,2);
2162     *rmap = A->rmap;
2163   }
2164   if (cmap) {
2165     PetscValidPointer(cmap,3);
2166     *cmap = A->cmap;
2167   }
2168   PetscFunctionReturn(0);
2169 }
2170 
2171 /*@C
2172    MatSetValuesLocal - Inserts or adds values into certain locations of a matrix,
2173    using a local numbering of the nodes.
2174 
2175    Not Collective
2176 
2177    Input Parameters:
2178 +  mat - the matrix
2179 .  nrow, irow - number of rows and their local indices
2180 .  ncol, icol - number of columns and their local indices
2181 .  y -  a logically two-dimensional array of values
2182 -  addv - either INSERT_VALUES or ADD_VALUES, where
2183    ADD_VALUES adds values to any existing entries, and
2184    INSERT_VALUES replaces existing entries with new values
2185 
2186    Notes:
2187    If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
2188       MatSetUp() before using this routine
2189 
2190    If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatSetLocalToGlobalMapping() before using this routine
2191 
2192    Calls to MatSetValuesLocal() with the INSERT_VALUES and ADD_VALUES
2193    options cannot be mixed without intervening calls to the assembly
2194    routines.
2195 
2196    These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
2197    MUST be called after all calls to MatSetValuesLocal() have been completed.
2198 
2199    Level: intermediate
2200 
2201    Developer Notes:
2202     This is labeled with C so does not automatically generate Fortran stubs and interfaces
2203                     because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.
2204 
2205 .seealso: `MatAssemblyBegin()`, `MatAssemblyEnd()`, `MatSetValues()`, `MatSetLocalToGlobalMapping()`,
2206           `MatSetValueLocal()`, `MatGetValuesLocal()`
2207 @*/
PetscErrorCode MatSetValuesLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
{
  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  MatCheckPreallocated(mat,1);
  if (!nrow || !ncol) PetscFunctionReturn(0); /* no values to insert */
  PetscValidIntPointer(irow,3);
  PetscValidIntPointer(icol,5);
  /* the first insertion fixes the mode; INSERT and ADD may not be mixed without an intervening assembly */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
    PetscCheck(mat->ops->setvalueslocal || mat->ops->setvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  }

  /* new insertions invalidate any previous assembly */
  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues,mat,0,0,0));
  if (mat->ops->setvalueslocal) {
    /* the type implements local-index insertion natively */
    PetscCall((*mat->ops->setvalueslocal)(mat,nrow,irow,ncol,icol,y,addv));
  } else {
    /* fallback: translate local indices to global ones and defer to MatSetValues() */
    PetscInt       buf[8192],*bufr=NULL,*bufc=NULL;
    const PetscInt *irowm,*icolm;

    /* with no mappings at all the buffers are never written to, so the stack buffer always suffices */
    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
      bufr  = buf;
      bufc  = buf + nrow;
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow,&bufr,ncol,&bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApply(mat->rmap->mapping,nrow,irow,bufr));
    else irowm = irow; /* no row mapping: the caller's indices are used as-is */
    if (mat->cmap->mapping) {
      /* reuse the row translation when rows and columns are the same set under the same mapping */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
        PetscCall(ISLocalToGlobalMappingApply(mat->cmap->mapping,ncol,icol,bufc));
      } else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValues(mat,nrow,irowm,ncol,icolm,y,addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr,bufc)); /* free only the heap-allocated path */
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues,mat,0,0,0));
  PetscFunctionReturn(0);
}
2258 
/*@C
   MatSetValuesBlockedLocal - Inserts or adds values into certain locations of a matrix,
   using a local ordering of the nodes a block at a time.

   Not Collective

   Input Parameters:
+  mat - the matrix
.  nrow, irow - number of rows and their local indices
.  ncol, icol - number of columns and their local indices
.  y -  a logically two-dimensional array of values
-  addv - either INSERT_VALUES or ADD_VALUES, where
   ADD_VALUES adds values to any existing entries, and
   INSERT_VALUES replaces existing entries with new values

   Notes:
   If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatXXXXSetPreallocation() or
      MatSetUp() before using this routine

   If you create the matrix yourself (that is not with a call to DMCreateMatrix()) then you MUST call MatSetBlockSize() and MatSetLocalToGlobalMapping()
      before using this routine

   Calls to MatSetValuesBlockedLocal() with the INSERT_VALUES and ADD_VALUES
   options cannot be mixed without intervening calls to the assembly
   routines.

   These values may be cached, so MatAssemblyBegin() and MatAssemblyEnd()
   MUST be called after all calls to MatSetValuesBlockedLocal() have been completed.

   Level: intermediate

   Developer Notes:
    This is labeled with C so does not automatically generate Fortran stubs and interfaces
                    because it requires multiple Fortran interfaces depending on which arguments are scalar or arrays.

.seealso: `MatSetBlockSize()`, `MatSetLocalToGlobalMapping()`, `MatAssemblyBegin()`, `MatAssemblyEnd()`,
          `MatSetValuesLocal()`, `MatSetValuesBlocked()`
@*/
PetscErrorCode MatSetValuesBlockedLocal(Mat mat,PetscInt nrow,const PetscInt irow[],PetscInt ncol,const PetscInt icol[],const PetscScalar y[],InsertMode addv)
{
  PetscFunctionBeginHot;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  MatCheckPreallocated(mat,1);
  if (!nrow || !ncol) PetscFunctionReturn(0); /* no values to insert */
  PetscValidIntPointer(irow,3);
  PetscValidIntPointer(icol,5);
  /* the first SetValues call after assembly fixes the insert mode; subsequent
     calls must use the same mode until the next MatAssemblyBegin/End() */
  if (mat->insertmode == NOT_SET_VALUES) mat->insertmode = addv;
  else PetscCheck(mat->insertmode == addv,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values");
  if (PetscDefined(USE_DEBUG)) {
    PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
    PetscCheck(mat->ops->setvaluesblockedlocal || mat->ops->setvaluesblocked || mat->ops->setvalueslocal || mat->ops->setvalues,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  }

  if (mat->assembled) {
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }
  if (PetscUnlikelyDebug(mat->rmap->mapping)) { /* Condition on the mapping existing, because MatSetValuesBlockedLocal_IS does not require it to be set. */
    PetscInt irbs, rbs;
    PetscCall(MatGetBlockSizes(mat, &rbs, NULL));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->rmap->mapping,&irbs));
    PetscCheck(rbs == irbs,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different row block sizes! mat %" PetscInt_FMT ", row l2g map %" PetscInt_FMT,rbs,irbs);
  }
  if (PetscUnlikelyDebug(mat->cmap->mapping)) {
    PetscInt icbs, cbs;
    PetscCall(MatGetBlockSizes(mat,NULL,&cbs));
    PetscCall(ISLocalToGlobalMappingGetBlockSize(mat->cmap->mapping,&icbs));
    PetscCheck(cbs == icbs,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Different col block sizes! mat %" PetscInt_FMT ", col l2g map %" PetscInt_FMT,cbs,icbs);
  }
  PetscCall(PetscLogEventBegin(MAT_SetValues,mat,0,0,0));
  if (mat->ops->setvaluesblockedlocal) {
    PetscCall((*mat->ops->setvaluesblockedlocal)(mat,nrow,irow,ncol,icol,y,addv));
  } else {
    /* no type-specific implementation: translate local block indices to global
       ourselves, then defer to MatSetValuesBlocked() */
    PetscInt       buf[8192],*bufr=NULL,*bufc=NULL; /* stack buffer avoids a malloc for modest index counts */
    const PetscInt *irowm,*icolm;

    if ((!mat->rmap->mapping && !mat->cmap->mapping) || (nrow+ncol) <= (PetscInt)(sizeof(buf)/sizeof(PetscInt))) {
      bufr  = buf;
      bufc  = buf + nrow;
      irowm = bufr;
      icolm = bufc;
    } else {
      PetscCall(PetscMalloc2(nrow,&bufr,ncol,&bufc));
      irowm = bufr;
      icolm = bufc;
    }
    if (mat->rmap->mapping) PetscCall(ISLocalToGlobalMappingApplyBlock(mat->rmap->mapping,nrow,irow,bufr));
    else irowm = irow;
    if (mat->cmap->mapping) {
      /* reuse the row translation when the column mapping and indices are identical */
      if (mat->cmap->mapping != mat->rmap->mapping || ncol != nrow || icol != irow) {
        PetscCall(ISLocalToGlobalMappingApplyBlock(mat->cmap->mapping,ncol,icol,bufc));
      } else icolm = irowm;
    } else icolm = icol;
    PetscCall(MatSetValuesBlocked(mat,nrow,irowm,ncol,icolm,y,addv));
    if (bufr != buf) PetscCall(PetscFree2(bufr,bufc));
  }
  PetscCall(PetscLogEventEnd(MAT_SetValues,mat,0,0,0));
  PetscFunctionReturn(0);
}
2359 
/*@
   MatMultDiagonalBlock - Computes the matrix-vector product, y = Dx. Where D is defined by the inode or block structure of the diagonal

   Collective on Mat

   Input Parameters:
+  mat - the matrix
-  x   - the vector to be multiplied

   Output Parameters:
.  y - the result

   Notes:
   The vectors x and y cannot be the same.  I.e., one cannot
   call MatMult(A,y,y).

   Level: developer

.seealso: `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatMultDiagonalBlock(Mat mat,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);

  /* only meaningful for a fully assembled, unfactored matrix */
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(x != y,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
  MatCheckPreallocated(mat,1);

  /* no generic fallback: the matrix type must supply this operation */
  PetscCheck(mat->ops->multdiagonalblock,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not have a multiply defined",((PetscObject)mat)->type_name);
  PetscCall((*mat->ops->multdiagonalblock)(mat,x,y));
  /* y was modified: bump its object state for PETSc's state tracking */
  PetscCall(PetscObjectStateIncrease((PetscObject)y));
  PetscFunctionReturn(0);
}
2398 
2399 /* --------------------------------------------------------*/
2400 /*@
2401    MatMult - Computes the matrix-vector product, y = Ax.
2402 
2403    Neighbor-wise Collective on Mat
2404 
2405    Input Parameters:
2406 +  mat - the matrix
2407 -  x   - the vector to be multiplied
2408 
2409    Output Parameters:
2410 .  y - the result
2411 
2412    Notes:
2413    The vectors x and y cannot be the same.  I.e., one cannot
2414    call MatMult(A,y,y).
2415 
2416    Level: beginner
2417 
2418 .seealso: `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
2419 @*/
2420 PetscErrorCode MatMult(Mat mat,Vec x,Vec y)
2421 {
2422   PetscFunctionBegin;
2423   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2424   PetscValidType(mat,1);
2425   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
2426   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
2427   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2428   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2429   PetscCheck(x != y,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
2430   PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
2431   PetscCheck(mat->rmap->N == y->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,y->map->N);
2432   PetscCheck(mat->cmap->n == x->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->n,x->map->n);
2433   PetscCheck(mat->rmap->n == y->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,y->map->n);
2434   PetscCall(VecSetErrorIfLocked(y,3));
2435   if (mat->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
2436   MatCheckPreallocated(mat,1);
2437 
2438   PetscCall(VecLockReadPush(x));
2439   PetscCheck(mat->ops->mult,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not have a multiply defined",((PetscObject)mat)->type_name);
2440   PetscCall(PetscLogEventBegin(MAT_Mult,mat,x,y,0));
2441   PetscCall((*mat->ops->mult)(mat,x,y));
2442   PetscCall(PetscLogEventEnd(MAT_Mult,mat,x,y,0));
2443   if (mat->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
2444   PetscCall(VecLockReadPop(x));
2445   PetscFunctionReturn(0);
2446 }
2447 
/*@
   MatMultTranspose - Computes matrix transpose times a vector y = A^T * x.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the matrix
-  x   - the vector to be multiplied

   Output Parameters:
.  y - the result

   Notes:
   The vectors x and y cannot be the same.  I.e., one cannot
   call MatMultTranspose(A,y,y).

   For complex numbers this does NOT compute the Hermitian (complex conjugate) transpose multiple,
   use MatMultHermitianTranspose()

   Level: beginner

.seealso: `MatMult()`, `MatMultAdd()`, `MatMultTransposeAdd()`, `MatMultHermitianTranspose()`, `MatTranspose()`
@*/
PetscErrorCode MatMultTranspose(Mat mat,Vec x,Vec y)
{
  PetscErrorCode (*op)(Mat,Vec,Vec) = NULL; /* implementation selected below */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);

  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(x != y,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
  /* note the transposed roles: x conforms to the row layout, y to the column layout */
  PetscCheck(mat->cmap->N == y->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,y->map->N);
  PetscCheck(mat->rmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,x->map->N);
  PetscCheck(mat->cmap->n == y->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->n,y->map->n);
  PetscCheck(mat->rmap->n == x->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,x->map->n);
  if (mat->erroriffailure) PetscCall(VecValidValues(x,2,PETSC_TRUE));
  MatCheckPreallocated(mat,1);

  if (!mat->ops->multtranspose) {
    /* for a symmetric matrix A^T x == A x, so fall back to the plain multiply */
    if (mat->symmetric && mat->ops->mult) op = mat->ops->mult;
    PetscCheck(op,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not have a multiply transpose defined or is symmetric and does not have a multiply defined",((PetscObject)mat)->type_name);
  } else op = mat->ops->multtranspose;
  PetscCall(PetscLogEventBegin(MAT_MultTranspose,mat,x,y,0));
  PetscCall(VecLockReadPush(x));
  PetscCall((*op)(mat,x,y));
  PetscCall(VecLockReadPop(x));
  PetscCall(PetscLogEventEnd(MAT_MultTranspose,mat,x,y,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)y));
  if (mat->erroriffailure) PetscCall(VecValidValues(y,3,PETSC_FALSE));
  PetscFunctionReturn(0);
}
2504 
/*@
   MatMultHermitianTranspose - Computes matrix Hermitian transpose times a vector.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the matrix
-  x   - the vector to be multiplied

   Output Parameters:
.  y - the result

   Notes:
   The vectors x and y cannot be the same.  I.e., one cannot
   call MatMultHermitianTranspose(A,y,y).

   Also called the conjugate transpose, complex conjugate transpose, or adjoint.

   For real numbers MatMultTranspose() and MatMultHermitianTranspose() are identical.

   Level: beginner

.seealso: `MatMult()`, `MatMultAdd()`, `MatMultHermitianTransposeAdd()`, `MatMultTranspose()`
@*/
PetscErrorCode MatMultHermitianTranspose(Mat mat,Vec x,Vec y)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);

  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(x != y,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"x and y must be different vectors");
  /* transposed roles: x conforms to the row layout, y to the column layout */
  PetscCheck(mat->cmap->N == y->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,y->map->N);
  PetscCheck(mat->rmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,x->map->N);
  PetscCheck(mat->cmap->n == y->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->n,y->map->n);
  PetscCheck(mat->rmap->n == x->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,x->map->n);
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTranspose,mat,x,y,0));
#if defined(PETSC_USE_COMPLEX)
  /* a Hermitian matrix satisfies A^H x == A x, so the plain multiply also serves */
  if (mat->ops->multhermitiantranspose || (mat->hermitian && mat->ops->mult)) {
    PetscCall(VecLockReadPush(x));
    if (mat->ops->multhermitiantranspose) {
      PetscCall((*mat->ops->multhermitiantranspose)(mat,x,y));
    } else {
      PetscCall((*mat->ops->mult)(mat,x,y));
    }
    PetscCall(VecLockReadPop(x));
  } else {
    /* fallback via the identity A^H x = conj(A^T conj(x)) */
    Vec w;
    PetscCall(VecDuplicate(x,&w));
    PetscCall(VecCopy(x,w));
    PetscCall(VecConjugate(w));
    PetscCall(MatMultTranspose(mat,w,y));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(y));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)y));
#else
  /* with real scalars the Hermitian transpose is just the transpose */
  PetscCall(MatMultTranspose(mat,x,y));
#endif
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTranspose,mat,x,y,0));
  PetscFunctionReturn(0);
}
2572 
/*@
    MatMultAdd -  Computes v3 = v2 + A * v1.

    Neighbor-wise Collective on Mat

    Input Parameters:
+   mat - the matrix
-   v1, v2 - the vectors

    Output Parameters:
.   v3 - the result

    Notes:
    The vectors v1 and v3 cannot be the same.  I.e., one cannot
    call MatMultAdd(A,v1,v2,v1).

    Level: beginner

.seealso: `MatMultTranspose()`, `MatMult()`, `MatMultTransposeAdd()`
@*/
PetscErrorCode MatMultAdd(Mat mat,Vec v1,Vec v2,Vec v3)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
  PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
  PetscValidHeaderSpecific(v3,VEC_CLASSID,4);

  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->cmap->N == v1->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,v1->map->N);
  /* global checks on v2/v3 are deliberately disabled; only local conformance is enforced below */
  /* PetscCheck(mat->rmap->N == v2->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v2->map->N);
     PetscCheck(mat->rmap->N == v3->map->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v3->map->N); */
  PetscCheck(mat->rmap->n == v3->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,v3->map->n);
  PetscCheck(mat->rmap->n == v2->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,v2->map->n);
  PetscCheck(v1 != v3,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
  MatCheckPreallocated(mat,1);

  PetscCheck(mat->ops->multadd,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"No MatMultAdd() for matrix type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_MultAdd,mat,v1,v2,v3));
  PetscCall(VecLockReadPush(v1));
  PetscCall((*mat->ops->multadd)(mat,v1,v2,v3));
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultAdd,mat,v1,v2,v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3));
  PetscFunctionReturn(0);
}
2621 
2622 /*@
2623    MatMultTransposeAdd - Computes v3 = v2 + A' * v1.
2624 
2625    Neighbor-wise Collective on Mat
2626 
2627    Input Parameters:
2628 +  mat - the matrix
2629 -  v1, v2 - the vectors
2630 
2631    Output Parameters:
2632 .  v3 - the result
2633 
2634    Notes:
2635    The vectors v1 and v3 cannot be the same.  I.e., one cannot
2636    call MatMultTransposeAdd(A,v1,v2,v1).
2637 
2638    Level: beginner
2639 
2640 .seealso: `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
2641 @*/
2642 PetscErrorCode MatMultTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
2643 {
2644   PetscErrorCode (*op)(Mat,Vec,Vec,Vec) = (!mat->ops->multtransposeadd && mat->symmetric) ? mat->ops->multadd : mat->ops->multtransposeadd;
2645 
2646   PetscFunctionBegin;
2647   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
2648   PetscValidType(mat,1);
2649   PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
2650   PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
2651   PetscValidHeaderSpecific(v3,VEC_CLASSID,4);
2652 
2653   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
2654   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
2655   PetscCheck(mat->rmap->N == v1->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v1->map->N);
2656   PetscCheck(mat->cmap->N == v2->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,v2->map->N);
2657   PetscCheck(mat->cmap->N == v3->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,v3->map->N);
2658   PetscCheck(v1 != v3,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
2659   PetscCheck(op,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
2660   MatCheckPreallocated(mat,1);
2661 
2662   PetscCall(PetscLogEventBegin(MAT_MultTransposeAdd,mat,v1,v2,v3));
2663   PetscCall(VecLockReadPush(v1));
2664   PetscCall((*op)(mat,v1,v2,v3));
2665   PetscCall(VecLockReadPop(v1));
2666   PetscCall(PetscLogEventEnd(MAT_MultTransposeAdd,mat,v1,v2,v3));
2667   PetscCall(PetscObjectStateIncrease((PetscObject)v3));
2668   PetscFunctionReturn(0);
2669 }
2670 
/*@
   MatMultHermitianTransposeAdd - Computes v3 = v2 + A^H * v1.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the matrix
-  v1, v2 - the vectors

   Output Parameters:
.  v3 - the result

   Notes:
   The vectors v1 and v3 cannot be the same.  I.e., one cannot
   call MatMultHermitianTransposeAdd(A,v1,v2,v1).

   Level: beginner

.seealso: `MatMultHermitianTranspose()`, `MatMultTranspose()`, `MatMultAdd()`, `MatMult()`
@*/
PetscErrorCode MatMultHermitianTransposeAdd(Mat mat,Vec v1,Vec v2,Vec v3)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(v1,VEC_CLASSID,2);
  PetscValidHeaderSpecific(v2,VEC_CLASSID,3);
  PetscValidHeaderSpecific(v3,VEC_CLASSID,4);

  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(v1 != v3,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"v1 and v3 must be different vectors");
  /* transposed roles: v1 conforms to the row layout, v2/v3 to the column layout */
  PetscCheck(mat->rmap->N == v1->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v1: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,v1->map->N);
  PetscCheck(mat->cmap->N == v2->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v2: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,v2->map->N);
  PetscCheck(mat->cmap->N == v3->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec v3: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,v3->map->N);
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3));
  PetscCall(VecLockReadPush(v1));
  if (mat->ops->multhermitiantransposeadd) {
    PetscCall((*mat->ops->multhermitiantransposeadd)(mat,v1,v2,v3));
  } else {
    /* fallback via the identity A^H v1 = conj(A^T conj(v1)), then add v2 */
    Vec w,z;
    PetscCall(VecDuplicate(v1,&w));
    PetscCall(VecCopy(v1,w));
    PetscCall(VecConjugate(w));
    PetscCall(VecDuplicate(v3,&z));
    PetscCall(MatMultTranspose(mat,w,z));
    PetscCall(VecDestroy(&w));
    PetscCall(VecConjugate(z));
    /* VecWAXPY requires distinct output; fall back to AXPY when v2 aliases v3 */
    if (v2 != v3) {
      PetscCall(VecWAXPY(v3,1.0,v2,z));
    } else {
      PetscCall(VecAXPY(v3,1.0,z));
    }
    PetscCall(VecDestroy(&z));
  }
  PetscCall(VecLockReadPop(v1));
  PetscCall(PetscLogEventEnd(MAT_MultHermitianTransposeAdd,mat,v1,v2,v3));
  PetscCall(PetscObjectStateIncrease((PetscObject)v3));
  PetscFunctionReturn(0);
}
2733 
/*@C
   MatGetFactorType - gets the type of factorization it is

   Not Collective

   Input Parameters:
.  mat - the matrix

   Output Parameters:
.  t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT

   Level: intermediate

.seealso: `MatFactorType`, `MatGetFactor()`, `MatSetFactorType()`
@*/
PetscErrorCode MatGetFactorType(Mat mat,MatFactorType *t)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(t,2);
  /* simple accessor; no collective communication is involved */
  *t = mat->factortype;
  PetscFunctionReturn(0);
}
2758 
/*@C
   MatSetFactorType - sets the type of factorization it is

   Logically Collective on Mat

   Input Parameters:
+  mat - the matrix
-  t - the type, one of MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT

   Level: intermediate

.seealso: `MatFactorType`, `MatGetFactor()`, `MatGetFactorType()`
@*/
PetscErrorCode MatSetFactorType(Mat mat, MatFactorType t)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* only records the flag; it does not perform or undo any factorization */
  mat->factortype = t;
  PetscFunctionReturn(0);
}
2780 
2781 /* ------------------------------------------------------------*/
/*@C
   MatGetInfo - Returns information about matrix storage (number of
   nonzeros, memory, etc.).

   Collective on Mat if MAT_GLOBAL_MAX or MAT_GLOBAL_SUM is used as the flag

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  flag - flag indicating the type of parameters to be returned
   (MAT_LOCAL - local matrix, MAT_GLOBAL_MAX - maximum over all processors,
   MAT_GLOBAL_SUM - sum over all processors)
-  info - matrix information context

   Notes:
   The MatInfo context contains a variety of matrix data, including
   number of nonzeros allocated and used, number of mallocs during
   matrix assembly, etc.  Additional information for factored matrices
   is provided (such as the fill ratio, number of mallocs during
   factorization, etc.).  Much of this info is printed to PETSC_STDOUT
   when using the runtime options
$       -info -mat_view ::ascii_info

   Example for C/C++ Users:
   See the file ${PETSC_DIR}/include/petscmat.h for a complete list of
   data within the MatInfo context.  For example,
.vb
      MatInfo info;
      Mat     A;
      double  mal, nz_a, nz_u;

      MatGetInfo(A,MAT_LOCAL,&info);
      mal  = info.mallocs;
      nz_a = info.nz_allocated;
.ve

   Example for Fortran Users:
   Fortran users should declare info as a double precision
   array of dimension MAT_INFO_SIZE, and then extract the parameters
   of interest.  See the file ${PETSC_DIR}/include/petsc/finclude/petscmat.h
   for a complete list of parameter names.
.vb
      double  precision info(MAT_INFO_SIZE)
      double  precision mal, nz_a
      Mat     A
      integer ierr

      call MatGetInfo(A,MAT_LOCAL,info,ierr)
      mal = info(MAT_INFO_MALLOCS)
      nz_a = info(MAT_INFO_NZ_ALLOCATED)
.ve

    Level: intermediate

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatInfo]

.seealso: `MatStashGetInfo()`

@*/
PetscErrorCode MatGetInfo(Mat mat,MatInfoType flag,MatInfo *info)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(info,3);
  /* delegates entirely to the matrix type's getinfo implementation */
  PetscCheck(mat->ops->getinfo,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  PetscCall((*mat->ops->getinfo)(mat,flag,info));
  PetscFunctionReturn(0);
}
2854 
/*
   This is used by external packages where it is not easy to get the info from the actual
   matrix factorization.
*/
PetscErrorCode MatGetInfo_External(Mat A,MatInfoType flag,MatInfo *info)
{
  PetscFunctionBegin;
  /* report all-zero statistics; A and flag are intentionally unused */
  PetscCall(PetscMemzero(info,sizeof(MatInfo)));
  PetscFunctionReturn(0);
}
2865 
2866 /* ----------------------------------------------------------*/
2867 
/*@C
   MatLUFactor - Performs in-place LU factorization of matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  row - row permutation
.  col - column permutation
-  info - options for factorization, includes
$          fill - expected fill as ratio of original fill.
$          dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
$                   Run with the option -info to determine an optimal value to use

   Notes:
   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   This changes the state of the matrix to a factored matrix; it cannot be used
   for example with MatSetValues() unless one first calls MatSetUnfactored().

   Level: developer

.seealso: `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`,
          `MatGetOrdering()`, `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatLUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo; /* defaults used when the caller passes info == NULL */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
  if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
  if (info) PetscValidPointer(info,4);
  PetscValidType(mat,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->lufactor,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  PetscCall(PetscLogEventBegin(MAT_LUFactor,mat,row,col,0));
  PetscCall((*mat->ops->lufactor)(mat,row,col,info));
  PetscCall(PetscLogEventEnd(MAT_LUFactor,mat,row,col,0));
  /* the matrix entries were replaced by the factors: bump its object state */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
2924 
/*@C
   MatILUFactor - Performs in-place ILU factorization of matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  row - row permutation
.  col - column permutation
-  info - structure containing
$      levels - number of levels of fill.
$      expected fill - as ratio of original fill.
$      1 or 0 - indicating force fill on diagonal (improves robustness for matrices
                missing diagonal entries)

   Notes:
   Probably really in-place only when level of fill is zero, otherwise allocates
   new space to store factored matrix and deletes previous memory.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatILUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatILUFactor(Mat mat,IS row,IS col,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
  if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
  /* unlike MatLUFactor(), info is required here (no NULL default) */
  PetscValidPointer(info,4);
  PetscValidType(mat,1);
  PetscCheck(mat->rmap->N == mat->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square");
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->ilufactor,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_ILUFactor,mat,row,col,0));
  PetscCall((*mat->ops->ilufactor)(mat,row,col,info));
  PetscCall(PetscLogEventEnd(MAT_ILUFactor,mat,row,col,0));
  /* the matrix entries were replaced by the factors: bump its object state */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
2976 
/*@C
   MatLUFactorSymbolic - Performs symbolic LU factorization of matrix.
   Call this routine before calling MatLUFactorNumeric().

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor()
.  mat - the matrix
.  row, col - row and column permutations
-  info - options for factorization, includes
$          fill - expected fill as ratio of original fill.
$          dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
$                   Run with the option -info to determine an optimal value to use

   Notes:
    See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.

   info may be NULL, in which case defaults from MatFactorInfoInitialize() are used.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatLUFactor()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatLUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,3);
  if (col) PetscValidHeaderSpecific(col,IS_CLASSID,4);
  if (info) PetscValidPointer(info,5);
  PetscValidType(mat,2);
  PetscValidPointer(fact,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!(fact)->ops->lufactorsymbolic) {
    MatSolverType stype;
    PetscCall(MatFactorGetSolverType(fact,&stype));
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic LU using solver package %s",((PetscObject)mat)->type_name,stype);
  }
  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* when fact marks its symbolic phase as trivial the event is not logged */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorSymbolic,mat,row,col,0));
  PetscCall((fact->ops->lufactorsymbolic)(fact,mat,row,col,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorSymbolic,mat,row,col,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3037 
/*@C
   MatLUFactorNumeric - Performs numeric LU factorization of a matrix.
   Call this routine after first calling MatLUFactorSymbolic().

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor()
.  mat - the matrix
-  info - options for factorization; may be NULL, in which case defaults from
          MatFactorInfoInitialize() are used

   Notes:
   See MatLUFactor() for in-place factorization.  See
   MatCholeskyFactorNumeric() for the symmetric, positive definite case.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatLUFactorSymbolic()`, `MatLUFactor()`, `MatCholeskyFactor()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatLUFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  PetscValidPointer(fact,1);
  PetscValidHeaderSpecific(fact,MAT_CLASSID,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);

  PetscCheck((fact)->ops->lufactornumeric,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric LU",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* with a trivial symbolic phase the work is logged under the combined MAT_LUFactor event instead */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_LUFactorNumeric,mat,fact,0,0));
  else PetscCall(PetscLogEventBegin(MAT_LUFactor,mat,fact,0,0));
  PetscCall((fact->ops->lufactornumeric)(fact,mat,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_LUFactorNumeric,mat,fact,0,0));
  else PetscCall(PetscLogEventEnd(MAT_LUFactor,mat,fact,0,0));
  PetscCall(MatViewFromOptions(fact,NULL,"-mat_factor_view"));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3093 
/*@C
   MatCholeskyFactor - Performs in-place Cholesky factorization of a
   symmetric matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  perm - row and column permutations
-  info - options for factorization (see MatFactorInfo); may be NULL, in which
          case defaults from MatFactorInfoInitialize() are used

   Notes:
   See MatLUFactor() for the nonsymmetric case.  See also
   MatCholeskyFactorSymbolic(), and MatCholeskyFactorNumeric().

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatLUFactor()`, `MatCholeskyFactorSymbolic()`, `MatCholeskyFactorNumeric()`
          `MatGetOrdering()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatCholeskyFactor(Mat mat,IS perm,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,2);
  if (info) PetscValidPointer(info,3);
  PetscCheck(mat->rmap->N == mat->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square");
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->choleskyfactor,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"In-place factorization for Mat type %s is not supported, try out-of-place factorization. See MatCholeskyFactorSymbolic/Numeric",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  PetscCall(PetscLogEventBegin(MAT_CholeskyFactor,mat,perm,0,0));
  PetscCall((*mat->ops->choleskyfactor)(mat,perm,info));
  PetscCall(PetscLogEventEnd(MAT_CholeskyFactor,mat,perm,0,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
3147 
/*@C
   MatCholeskyFactorSymbolic - Performs symbolic Cholesky factorization
   of a symmetric matrix.

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor()
.  mat - the matrix
.  perm - row and column permutations
-  info - options for factorization, includes
$          fill - expected fill as ratio of original fill.
$          dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
$                   Run with the option -info to determine an optimal value to use

   Notes:
   See MatLUFactorSymbolic() for the nonsymmetric case.  See also
   MatCholeskyFactor() and MatCholeskyFactorNumeric().

   info may be NULL, in which case defaults from MatFactorInfoInitialize() are used.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatLUFactorSymbolic()`, `MatCholeskyFactor()`, `MatCholeskyFactorNumeric()`
          `MatGetOrdering()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatCholeskyFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,3);
  if (info) PetscValidPointer(info,4);
  PetscValidPointer(fact,1);
  PetscCheck(mat->rmap->N == mat->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"Matrix must be square");
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  if (!(fact)->ops->choleskyfactorsymbolic) {
    MatSolverType stype;
    PetscCall(MatFactorGetSolverType(fact,&stype));
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s symbolic factor Cholesky using solver package %s",((PetscObject)mat)->type_name,stype);
  }
  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* when fact marks its symbolic phase as trivial the event is not logged */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorSymbolic,mat,perm,0,0));
  PetscCall((fact->ops->choleskyfactorsymbolic)(fact,mat,perm,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorSymbolic,mat,perm,0,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3210 
/*@C
   MatCholeskyFactorNumeric - Performs numeric Cholesky factorization
   of a symmetric matrix. Call this routine after first calling
   MatCholeskyFactorSymbolic().

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor(), holding the symbolic
          factor of mat produced by MatCholeskyFactorSymbolic()
.  mat - the initial matrix
-  info - options for factorization; may be NULL, in which case defaults from
          MatFactorInfoInitialize() are used

   Notes:
   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatCholeskyFactorSymbolic()`, `MatCholeskyFactor()`, `MatLUFactorNumeric()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatCholeskyFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  PetscValidPointer(fact,1);
  PetscValidHeaderSpecific(fact,MAT_CLASSID,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck((fact)->ops->choleskyfactornumeric,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s numeric factor Cholesky",((PetscObject)mat)->type_name);
  PetscCheck(mat->rmap->N == (fact)->rmap->N && mat->cmap->N == (fact)->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dim %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);
  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* with a trivial symbolic phase the work is logged under the combined MAT_CholeskyFactor event instead */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_CholeskyFactorNumeric,mat,fact,0,0));
  else PetscCall(PetscLogEventBegin(MAT_CholeskyFactor,mat,fact,0,0));
  PetscCall((fact->ops->choleskyfactornumeric)(fact,mat,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_CholeskyFactorNumeric,mat,fact,0,0));
  else PetscCall(PetscLogEventEnd(MAT_CholeskyFactor,mat,fact,0,0));
  PetscCall(MatViewFromOptions(fact,NULL,"-mat_factor_view"));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3264 
3265 /*@
3266    MatQRFactor - Performs in-place QR factorization of matrix.
3267 
3268    Collective on Mat
3269 
3270    Input Parameters:
3271 +  mat - the matrix
3272 .  col - column permutation
3273 -  info - options for factorization, includes
3274 $          fill - expected fill as ratio of original fill.
3275 $          dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
3276 $                   Run with the option -info to determine an optimal value to use
3277 
3278    Notes:
3279    Most users should employ the simplified KSP interface for linear solvers
3280    instead of working directly with matrix algebra routines such as this.
3281    See, e.g., KSPCreate().
3282 
3283    This changes the state of the matrix to a factored matrix; it cannot be used
3284    for example with MatSetValues() unless one first calls MatSetUnfactored().
3285 
3286    Level: developer
3287 
3288 .seealso: `MatQRFactorSymbolic()`, `MatQRFactorNumeric()`, `MatLUFactor()`,
3289           `MatSetUnfactored()`, `MatFactorInfo`, `MatGetFactor()`
3290 
3291     Developer Note: fortran interface is not autogenerated as the f90
3292     interface definition cannot be generated correctly [due to MatFactorInfo]
3293 
3294 @*/
3295 PetscErrorCode MatQRFactor(Mat mat, IS col, const MatFactorInfo *info)
3296 {
3297   PetscFunctionBegin;
3298   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3299   if (col) PetscValidHeaderSpecific(col,IS_CLASSID,2);
3300   if (info) PetscValidPointer(info,3);
3301   PetscValidType(mat,1);
3302   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
3303   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
3304   MatCheckPreallocated(mat,1);
3305   PetscCall(PetscLogEventBegin(MAT_QRFactor,mat,col,0,0));
3306   PetscUseMethod(mat,"MatQRFactor_C", (Mat,IS,const MatFactorInfo*), (mat, col, info));
3307   PetscCall(PetscLogEventEnd(MAT_QRFactor,mat,col,0,0));
3308   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
3309   PetscFunctionReturn(0);
3310 }
3311 
/*@
   MatQRFactorSymbolic - Performs symbolic QR factorization of matrix.
   Call this routine before calling MatQRFactorNumeric().

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor()
.  mat - the matrix
.  col - column permutation
-  info - options for factorization, includes
$          fill - expected fill as ratio of original fill.
$          dtcol - pivot tolerance (0 no pivot, 1 full column pivoting)
$                   Run with the option -info to determine an optimal value to use

   Notes:
   info may be NULL, in which case defaults from MatFactorInfoInitialize() are used.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatQRFactor()`, `MatQRFactorNumeric()`, `MatLUFactor()`, `MatFactorInfo`, `MatFactorInfoInitialize()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatQRFactorSymbolic(Mat fact,Mat mat,IS col,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  if (col) PetscValidHeaderSpecific(col,IS_CLASSID,3);
  if (info) PetscValidPointer(info,4);
  PetscValidType(mat,2);
  PetscValidPointer(fact,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* when fact marks its symbolic phase as trivial the event is not logged */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorSymbolic,fact,mat,col,0));
  PetscUseMethod(fact,"MatQRFactorSymbolic_C", (Mat,Mat,IS,const MatFactorInfo*), (fact, mat, col, info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorSymbolic,fact,mat,col,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3363 
/*@
   MatQRFactorNumeric - Performs numeric QR factorization of a matrix.
   Call this routine after first calling MatQRFactorSymbolic().

   Collective on Mat

   Input Parameters:
+  fact - the factor matrix obtained with MatGetFactor()
.  mat - the matrix
-  info - options for factorization; may be NULL, in which case defaults from
          MatFactorInfoInitialize() are used

   Notes:
   See MatQRFactor() for in-place factorization.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatQRFactorSymbolic()`, `MatLUFactor()`

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

@*/
PetscErrorCode MatQRFactorNumeric(Mat fact,Mat mat,const MatFactorInfo *info)
{
  MatFactorInfo  tinfo;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  PetscValidPointer(fact,1);
  PetscValidHeaderSpecific(fact,MAT_CLASSID,1);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(mat->rmap->N == fact->rmap->N && mat->cmap->N == fact->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Mat fact: global dimensions are different %" PetscInt_FMT " should = %" PetscInt_FMT " %" PetscInt_FMT " should = %" PetscInt_FMT,mat->rmap->N,(fact)->rmap->N,mat->cmap->N,(fact)->cmap->N);

  MatCheckPreallocated(mat,2);
  /* substitute default factorization options when the caller passed NULL */
  if (!info) {
    PetscCall(MatFactorInfoInitialize(&tinfo));
    info = &tinfo;
  }

  /* with a trivial symbolic phase the work is logged under the combined MAT_QRFactor event instead */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_QRFactorNumeric,mat,fact,0,0));
  else  PetscCall(PetscLogEventBegin(MAT_QRFactor,mat,fact,0,0));
  PetscUseMethod(fact,"MatQRFactorNumeric_C", (Mat,Mat,const MatFactorInfo*), (fact, mat, info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_QRFactorNumeric,mat,fact,0,0));
  else PetscCall(PetscLogEventEnd(MAT_QRFactor,mat,fact,0,0));
  PetscCall(MatViewFromOptions(fact,NULL,"-mat_factor_view"));
  PetscCall(PetscObjectStateIncrease((PetscObject)fact));
  PetscFunctionReturn(0);
}
3417 
3418 /* ----------------------------------------------------------------*/
/*@
   MatSolve - Solves A x = b, given a factored matrix.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the factored matrix
-  b - the right-hand-side vector

   Output Parameter:
.  x - the result vector

   Notes:
   The vectors b and x cannot be the same.  I.e., one cannot
   call MatSolve(A,x,x).

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
@*/
PetscErrorCode MatSolve(Mat mat,Vec b,Vec x)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscCheckSameComm(mat,1,b,2);
  PetscCheckSameComm(mat,1,x,3);
  PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
  PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
  PetscCheck(mat->rmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,b->map->N);
  PetscCheck(mat->rmap->n == b->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,b->map->n);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0); /* 0 x 0 system: nothing to solve */
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_Solve,mat,b,x,0));
  if (mat->factorerrortype) {
    /* a previously failed factorization poisons the solution with Inf instead of erroring here */
    PetscCall(PetscInfo(mat,"MatFactorError %d\n",mat->factorerrortype));
    PetscCall(VecSetInf(x));
  } else {
    PetscCheck(mat->ops->solve,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
    PetscCall((*mat->ops->solve)(mat,b,x));
  }
  PetscCall(PetscLogEventEnd(MAT_Solve,mat,b,x,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x));
  PetscFunctionReturn(0);
}
3472 
/*
   MatMatSolve_Basic - Fallback implementation of MatMatSolve()/MatMatSolveTranspose():
   solves A X = B (or A^T X = B when trans is PETSC_TRUE) one column of B/X at a time
   using the factored matrix A's single-vector solve routine.

   When the (transpose) solve of a symmetric A lacking solvetranspose is requested,
   the regular solve is used since A = A^T.

   If A is not bound to the CPU, dense B and X are converted in place to MATDENSECUDA
   for the duration of the column solves and converted back to MATDENSE afterwards
   (presumably so the per-column solves run on the GPU -- see MatBoundToCPU()).
*/
static PetscErrorCode MatMatSolve_Basic(Mat A,Mat B,Mat X,PetscBool trans)
{
  Vec            b,x;
  PetscInt       N,i;
  PetscErrorCode (*f)(Mat,Vec,Vec);
  PetscBool      Abound,Bneedconv = PETSC_FALSE,Xneedconv = PETSC_FALSE;

  PetscFunctionBegin;
  if (A->factorerrortype) {
    /* a previously failed factorization poisons the solution with Inf instead of erroring here */
    PetscCall(PetscInfo(A,"MatFactorError %d\n",A->factorerrortype));
    PetscCall(MatSetInf(X));
    PetscFunctionReturn(0);
  }
  /* pick the forward or transpose single-vector solve; fall back to the forward
     solve for a symmetric matrix with no transpose solve */
  f = (!trans || (!A->ops->solvetranspose && A->symmetric)) ? A->ops->solve : A->ops->solvetranspose;
  PetscCheck(f,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name);
  PetscCall(MatBoundToCPU(A,&Abound));
  if (!Abound) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)B,&Bneedconv,MATSEQDENSE,MATMPIDENSE,""));
    PetscCall(PetscObjectTypeCompareAny((PetscObject)X,&Xneedconv,MATSEQDENSE,MATMPIDENSE,""));
  }
  if (Bneedconv) {
    PetscCall(MatConvert(B,MATDENSECUDA,MAT_INPLACE_MATRIX,&B));
  }
  if (Xneedconv) {
    PetscCall(MatConvert(X,MATDENSECUDA,MAT_INPLACE_MATRIX,&X));
  }
  PetscCall(MatGetSize(B,NULL,&N));
  /* solve column by column: X(:,i) = A^{-1} B(:,i) */
  for (i=0; i<N; i++) {
    PetscCall(MatDenseGetColumnVecRead(B,i,&b));
    PetscCall(MatDenseGetColumnVecWrite(X,i,&x));
    PetscCall((*f)(A,b,x));
    PetscCall(MatDenseRestoreColumnVecWrite(X,i,&x));
    PetscCall(MatDenseRestoreColumnVecRead(B,i,&b));
  }
  if (Bneedconv) {
    PetscCall(MatConvert(B,MATDENSE,MAT_INPLACE_MATRIX,&B));
  }
  if (Xneedconv) {
    PetscCall(MatConvert(X,MATDENSE,MAT_INPLACE_MATRIX,&X));
  }
  PetscFunctionReturn(0);
}
3515 
/*@
   MatMatSolve - Solves A X = B, given a factored matrix.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  A - the factored matrix
-  B - the right-hand-side matrix MATDENSE (or sparse -- when using MUMPS)

   Output Parameter:
.  X - the result matrix (dense matrix)

   Notes:
   If B is a MATDENSE matrix then one can call MatMatSolve(A,B,B) except with MKL_CPARDISO;
   otherwise, B and X cannot be the same.

   Most users should usually employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
   at a time.

   Level: developer

.seealso: `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
@*/
PetscErrorCode MatMatSolve(Mat A,Mat B,Mat X)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
  PetscValidHeaderSpecific(X,MAT_CLASSID,3);
  PetscCheckSameComm(A,1,B,2);
  PetscCheckSameComm(A,1,X,3);
  PetscCheck(A->cmap->N == X->rmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->cmap->N,X->rmap->N);
  PetscCheck(A->rmap->N == B->rmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->rmap->N,B->rmap->N);
  PetscCheck(X->cmap->N == B->cmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
  if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0); /* 0 x 0 system: nothing to solve */
  PetscCheck(A->factortype,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
  MatCheckPreallocated(A,1);

  PetscCall(PetscLogEventBegin(MAT_MatSolve,A,B,X,0));
  if (!A->ops->matsolve) {
    /* no type-specific multi-RHS solve: fall back to column-by-column MatMatSolve_Basic() */
    PetscCall(PetscInfo(A,"Mat type %s using basic MatMatSolve\n",((PetscObject)A)->type_name));
    PetscCall(MatMatSolve_Basic(A,B,X,PETSC_FALSE));
  } else {
    PetscCall((*A->ops->matsolve)(A,B,X));
  }
  PetscCall(PetscLogEventEnd(MAT_MatSolve,A,B,X,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)X));
  PetscFunctionReturn(0);
}
3569 
3570 /*@
3571    MatMatSolveTranspose - Solves A^T X = B, given a factored matrix.
3572 
3573    Neighbor-wise Collective on Mat
3574 
3575    Input Parameters:
3576 +  A - the factored matrix
3577 -  B - the right-hand-side matrix  (dense matrix)
3578 
3579    Output Parameter:
3580 .  X - the result matrix (dense matrix)
3581 
3582    Notes:
3583    The matrices B and X cannot be the same.  I.e., one cannot
3584    call MatMatSolveTranspose(A,X,X).
3585 
3586    Notes:
3587    Most users should usually employ the simplified KSP interface for linear solvers
3588    instead of working directly with matrix algebra routines such as this.
3589    See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
3590    at a time.
3591 
3592    When using SuperLU_Dist or MUMPS as a parallel solver, PETSc will use their functionality to solve multiple right hand sides simultaneously.
3593 
3594    Level: developer
3595 
3596 .seealso: `MatMatSolve()`, `MatLUFactor()`, `MatCholeskyFactor()`
3597 @*/
3598 PetscErrorCode MatMatSolveTranspose(Mat A,Mat B,Mat X)
3599 {
3600   PetscFunctionBegin;
3601   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
3602   PetscValidType(A,1);
3603   PetscValidHeaderSpecific(B,MAT_CLASSID,2);
3604   PetscValidHeaderSpecific(X,MAT_CLASSID,3);
3605   PetscCheckSameComm(A,1,B,2);
3606   PetscCheckSameComm(A,1,X,3);
3607   PetscCheck(X != B,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
3608   PetscCheck(A->cmap->N == X->rmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->cmap->N,X->rmap->N);
3609   PetscCheck(A->rmap->N == B->rmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->rmap->N,B->rmap->N);
3610   PetscCheck(A->rmap->n == B->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat A,Mat B: local dim %" PetscInt_FMT " %" PetscInt_FMT,A->rmap->n,B->rmap->n);
3611   PetscCheck(X->cmap->N >= B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as rhs matrix");
3612   if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0);
3613   PetscCheck(A->factortype,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
3614   MatCheckPreallocated(A,1);
3615 
3616   PetscCall(PetscLogEventBegin(MAT_MatSolve,A,B,X,0));
3617   if (!A->ops->matsolvetranspose) {
3618     PetscCall(PetscInfo(A,"Mat type %s using basic MatMatSolveTranspose\n",((PetscObject)A)->type_name));
3619     PetscCall(MatMatSolve_Basic(A,B,X,PETSC_TRUE));
3620   } else {
3621     PetscCall((*A->ops->matsolvetranspose)(A,B,X));
3622   }
3623   PetscCall(PetscLogEventEnd(MAT_MatSolve,A,B,X,0));
3624   PetscCall(PetscObjectStateIncrease((PetscObject)X));
3625   PetscFunctionReturn(0);
3626 }
3627 
/*@
   MatMatTransposeSolve - Solves A X = B^T, given a factored matrix.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  A - the factored matrix
-  Bt - the transpose of right-hand-side matrix

   Output Parameter:
.  X - the result matrix (dense matrix)

   Notes:
   Most users should usually employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate(). However KSP can only solve for one vector (column of X)
   at a time.

   For MUMPS, it only supports centralized sparse compressed column format on the host processor for right hand side matrix. User must create B^T in sparse compressed row format on the host processor and call MatMatTransposeSolve() to implement MUMPS' MatMatSolve().

   Level: developer

.seealso: `MatMatSolve()`, `MatMatSolveTranspose()`, `MatLUFactor()`, `MatCholeskyFactor()`
@*/
PetscErrorCode MatMatTransposeSolve(Mat A,Mat Bt,Mat X)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidType(A,1);
  PetscValidHeaderSpecific(Bt,MAT_CLASSID,2);
  PetscValidHeaderSpecific(X,MAT_CLASSID,3);
  PetscCheckSameComm(A,1,Bt,2);
  PetscCheckSameComm(A,1,X,3);

  /* in-place solves are not supported: X must not alias the right-hand side */
  PetscCheck(X != Bt,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_IDN,"X and B must be different matrices");
  /* A X = B^T requires cols(A) == rows(X) and rows(A) == cols(Bt) globally */
  PetscCheck(A->cmap->N == X->rmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat X: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->cmap->N,X->rmap->N);
  PetscCheck(A->rmap->N == Bt->cmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat Bt: global dim %" PetscInt_FMT " %" PetscInt_FMT,A->rmap->N,Bt->cmap->N);
  /* X may carry extra columns beyond the number of right-hand sides (>=, not ==) */
  PetscCheck(X->cmap->N >= Bt->rmap->N,PetscObjectComm((PetscObject)X),PETSC_ERR_ARG_SIZ,"Solution matrix must have same number of columns as row number of the rhs matrix");
  if (!A->rmap->N && !A->cmap->N) PetscFunctionReturn(0); /* 0x0 system: nothing to do */
  PetscCheck(A->factortype,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
  MatCheckPreallocated(A,1);

  /* no generic fallback here: the factorization package must supply this operation */
  PetscCheck(A->ops->mattransposesolve,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Mat type %s",((PetscObject)A)->type_name);
  PetscCall(PetscLogEventBegin(MAT_MatTrSolve,A,Bt,X,0));
  PetscCall((*A->ops->mattransposesolve)(A,Bt,X));
  PetscCall(PetscLogEventEnd(MAT_MatTrSolve,A,Bt,X,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)X));
  PetscFunctionReturn(0);
}
3677 
/*@
   MatForwardSolve - Solves L x = b, given a factored matrix, A = LU, or
                            U^T*D^(1/2) x = b, given a factored symmetric matrix, A = U^T*D*U,

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the factored matrix
-  b - the right-hand-side vector

   Output Parameter:
.  x - the result vector

   Notes:
   MatSolve() should be used for most applications, as it performs
   a forward solve followed by a backward solve.

   The vectors b and x cannot be the same,  i.e., one cannot
   call MatForwardSolve(A,x,x).

   For matrix in seqsbaij format with block size larger than 1,
   the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
   MatForwardSolve() solves U^T*D y = b, and
   MatBackwardSolve() solves U x = y.
   Thus they do not provide a symmetric preconditioner.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatSolve()`, `MatBackwardSolve()`
@*/
PetscErrorCode MatForwardSolve(Mat mat,Vec b,Vec x)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscCheckSameComm(mat,1,b,2);
  PetscCheckSameComm(mat,1,x,3);
  /* the triangular solve kernels do not support an in-place solve */
  PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
  PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
  PetscCheck(mat->rmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,b->map->N);
  PetscCheck(mat->rmap->n == b->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,b->map->n);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0); /* empty system */
  MatCheckPreallocated(mat,1);

  /* NOTE(review): unlike MatMatTransposeSolve() there is no explicit factortype
     check here; presumably the forwardsolve op is only installed on factored
     matrix types -- confirm */
  PetscCheck(mat->ops->forwardsolve,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_ForwardSolve,mat,b,x,0));
  PetscCall((*mat->ops->forwardsolve)(mat,b,x));
  PetscCall(PetscLogEventEnd(MAT_ForwardSolve,mat,b,x,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x));
  PetscFunctionReturn(0);
}
3735 
/*@
   MatBackwardSolve - Solves U x = b, given a factored matrix, A = LU.
                             D^(1/2) U x = b, given a factored symmetric matrix, A = U^T*D*U,

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the factored matrix
-  b - the right-hand-side vector

   Output Parameter:
.  x - the result vector

   Notes:
   MatSolve() should be used for most applications, as it performs
   a forward solve followed by a backward solve.

   The vectors b and x cannot be the same.  I.e., one cannot
   call MatBackwardSolve(A,x,x).

   For matrix in seqsbaij format with block size larger than 1,
   the diagonal blocks are not implemented as D = D^(1/2) * D^(1/2) yet.
   MatForwardSolve() solves U^T*D y = b, and
   MatBackwardSolve() solves U x = y.
   Thus they do not provide a symmetric preconditioner.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatSolve()`, `MatForwardSolve()`
@*/
PetscErrorCode MatBackwardSolve(Mat mat,Vec b,Vec x)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,3);
  PetscCheckSameComm(mat,1,b,2);
  PetscCheckSameComm(mat,1,x,3);
  /* the triangular solve kernels do not support an in-place solve */
  PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
  PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
  PetscCheck(mat->rmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,b->map->N);
  PetscCheck(mat->rmap->n == b->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,b->map->n);
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0); /* empty system */
  MatCheckPreallocated(mat,1);

  PetscCheck(mat->ops->backwardsolve,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_BackwardSolve,mat,b,x,0));
  PetscCall((*mat->ops->backwardsolve)(mat,b,x));
  PetscCall(PetscLogEventEnd(MAT_BackwardSolve,mat,b,x,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x));
  PetscFunctionReturn(0);
}
3793 
3794 /*@
3795    MatSolveAdd - Computes x = y + inv(A)*b, given a factored matrix.
3796 
3797    Neighbor-wise Collective on Mat
3798 
3799    Input Parameters:
3800 +  mat - the factored matrix
3801 .  b - the right-hand-side vector
3802 -  y - the vector to be added to
3803 
3804    Output Parameter:
3805 .  x - the result vector
3806 
3807    Notes:
3808    The vectors b and x cannot be the same.  I.e., one cannot
3809    call MatSolveAdd(A,x,y,x).
3810 
3811    Most users should employ the simplified KSP interface for linear solvers
3812    instead of working directly with matrix algebra routines such as this.
3813    See, e.g., KSPCreate().
3814 
3815    Level: developer
3816 
3817 .seealso: `MatSolve()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`
3818 @*/
3819 PetscErrorCode MatSolveAdd(Mat mat,Vec b,Vec y,Vec x)
3820 {
3821   PetscScalar    one = 1.0;
3822   Vec            tmp;
3823 
3824   PetscFunctionBegin;
3825   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3826   PetscValidType(mat,1);
3827   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
3828   PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3829   PetscValidHeaderSpecific(x,VEC_CLASSID,4);
3830   PetscCheckSameComm(mat,1,b,2);
3831   PetscCheckSameComm(mat,1,y,3);
3832   PetscCheckSameComm(mat,1,x,4);
3833   PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3834   PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
3835   PetscCheck(mat->rmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,b->map->N);
3836   PetscCheck(mat->rmap->N == y->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,y->map->N);
3837   PetscCheck(mat->rmap->n == b->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,b->map->n);
3838   PetscCheck(x->map->n == y->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT,x->map->n,y->map->n);
3839   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3840    MatCheckPreallocated(mat,1);
3841 
3842   PetscCall(PetscLogEventBegin(MAT_SolveAdd,mat,b,x,y));
3843   if (mat->factorerrortype) {
3844 
3845     PetscCall(PetscInfo(mat,"MatFactorError %d\n",mat->factorerrortype));
3846     PetscCall(VecSetInf(x));
3847   } else if (mat->ops->solveadd) {
3848     PetscCall((*mat->ops->solveadd)(mat,b,y,x));
3849   } else {
3850     /* do the solve then the add manually */
3851     if (x != y) {
3852       PetscCall(MatSolve(mat,b,x));
3853       PetscCall(VecAXPY(x,one,y));
3854     } else {
3855       PetscCall(VecDuplicate(x,&tmp));
3856       PetscCall(PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp));
3857       PetscCall(VecCopy(x,tmp));
3858       PetscCall(MatSolve(mat,b,x));
3859       PetscCall(VecAXPY(x,one,tmp));
3860       PetscCall(VecDestroy(&tmp));
3861     }
3862   }
3863   PetscCall(PetscLogEventEnd(MAT_SolveAdd,mat,b,x,y));
3864   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3865   PetscFunctionReturn(0);
3866 }
3867 
3868 /*@
3869    MatSolveTranspose - Solves A' x = b, given a factored matrix.
3870 
3871    Neighbor-wise Collective on Mat
3872 
3873    Input Parameters:
3874 +  mat - the factored matrix
3875 -  b - the right-hand-side vector
3876 
3877    Output Parameter:
3878 .  x - the result vector
3879 
3880    Notes:
3881    The vectors b and x cannot be the same.  I.e., one cannot
3882    call MatSolveTranspose(A,x,x).
3883 
3884    Most users should employ the simplified KSP interface for linear solvers
3885    instead of working directly with matrix algebra routines such as this.
3886    See, e.g., KSPCreate().
3887 
3888    Level: developer
3889 
3890 .seealso: `MatSolve()`, `MatSolveAdd()`, `MatSolveTransposeAdd()`
3891 @*/
3892 PetscErrorCode MatSolveTranspose(Mat mat,Vec b,Vec x)
3893 {
3894   PetscErrorCode (*f)(Mat,Vec,Vec) = (!mat->ops->solvetranspose && mat->symmetric) ? mat->ops->solve : mat->ops->solvetranspose;
3895 
3896   PetscFunctionBegin;
3897   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3898   PetscValidType(mat,1);
3899   PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3900   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
3901   PetscCheckSameComm(mat,1,b,2);
3902   PetscCheckSameComm(mat,1,x,3);
3903   PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3904   PetscCheck(mat->rmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,x->map->N);
3905   PetscCheck(mat->cmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,b->map->N);
3906   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3907   MatCheckPreallocated(mat,1);
3908   PetscCall(PetscLogEventBegin(MAT_SolveTranspose,mat,b,x,0));
3909   if (mat->factorerrortype) {
3910     PetscCall(PetscInfo(mat,"MatFactorError %d\n",mat->factorerrortype));
3911     PetscCall(VecSetInf(x));
3912   } else {
3913     PetscCheck(f,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s",((PetscObject)mat)->type_name);
3914     PetscCall((*f)(mat,b,x));
3915   }
3916   PetscCall(PetscLogEventEnd(MAT_SolveTranspose,mat,b,x,0));
3917   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3918   PetscFunctionReturn(0);
3919 }
3920 
3921 /*@
3922    MatSolveTransposeAdd - Computes x = y + inv(Transpose(A)) b, given a
3923                       factored matrix.
3924 
3925    Neighbor-wise Collective on Mat
3926 
3927    Input Parameters:
3928 +  mat - the factored matrix
3929 .  b - the right-hand-side vector
3930 -  y - the vector to be added to
3931 
3932    Output Parameter:
3933 .  x - the result vector
3934 
3935    Notes:
3936    The vectors b and x cannot be the same.  I.e., one cannot
3937    call MatSolveTransposeAdd(A,x,y,x).
3938 
3939    Most users should employ the simplified KSP interface for linear solvers
3940    instead of working directly with matrix algebra routines such as this.
3941    See, e.g., KSPCreate().
3942 
3943    Level: developer
3944 
3945 .seealso: `MatSolve()`, `MatSolveAdd()`, `MatSolveTranspose()`
3946 @*/
3947 PetscErrorCode MatSolveTransposeAdd(Mat mat,Vec b,Vec y,Vec x)
3948 {
3949   PetscScalar    one = 1.0;
3950   Vec            tmp;
3951   PetscErrorCode (*f)(Mat,Vec,Vec,Vec) = (!mat->ops->solvetransposeadd && mat->symmetric) ? mat->ops->solveadd : mat->ops->solvetransposeadd;
3952 
3953   PetscFunctionBegin;
3954   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
3955   PetscValidType(mat,1);
3956   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
3957   PetscValidHeaderSpecific(b,VEC_CLASSID,2);
3958   PetscValidHeaderSpecific(x,VEC_CLASSID,4);
3959   PetscCheckSameComm(mat,1,b,2);
3960   PetscCheckSameComm(mat,1,y,3);
3961   PetscCheckSameComm(mat,1,x,4);
3962   PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
3963   PetscCheck(mat->rmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,x->map->N);
3964   PetscCheck(mat->cmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,b->map->N);
3965   PetscCheck(mat->cmap->N == y->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec y: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,y->map->N);
3966   PetscCheck(x->map->n == y->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Vec x,Vec y: local dim %" PetscInt_FMT " %" PetscInt_FMT,x->map->n,y->map->n);
3967   if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);
3968   MatCheckPreallocated(mat,1);
3969 
3970   PetscCall(PetscLogEventBegin(MAT_SolveTransposeAdd,mat,b,x,y));
3971   if (mat->factorerrortype) {
3972     PetscCall(PetscInfo(mat,"MatFactorError %d\n",mat->factorerrortype));
3973     PetscCall(VecSetInf(x));
3974   } else if (f) {
3975     PetscCall((*f)(mat,b,y,x));
3976   } else {
3977     /* do the solve then the add manually */
3978     if (x != y) {
3979       PetscCall(MatSolveTranspose(mat,b,x));
3980       PetscCall(VecAXPY(x,one,y));
3981     } else {
3982       PetscCall(VecDuplicate(x,&tmp));
3983       PetscCall(PetscLogObjectParent((PetscObject)mat,(PetscObject)tmp));
3984       PetscCall(VecCopy(x,tmp));
3985       PetscCall(MatSolveTranspose(mat,b,x));
3986       PetscCall(VecAXPY(x,one,tmp));
3987       PetscCall(VecDestroy(&tmp));
3988     }
3989   }
3990   PetscCall(PetscLogEventEnd(MAT_SolveTransposeAdd,mat,b,x,y));
3991   PetscCall(PetscObjectStateIncrease((PetscObject)x));
3992   PetscFunctionReturn(0);
3993 }
3994 /* ----------------------------------------------------------------*/
3995 
/*@
   MatSOR - Computes relaxation (SOR, Gauss-Seidel) sweeps.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  mat - the matrix
.  b - the right hand side
.  omega - the relaxation factor
.  flag - flag indicating the type of SOR (see below)
.  shift -  diagonal shift
.  its - the number of iterations
-  lits - the number of local iterations

   Output Parameter:
.  x - the solution (can contain an initial guess, use option SOR_ZERO_INITIAL_GUESS to indicate no guess)

   SOR Flags:
+     SOR_FORWARD_SWEEP - forward SOR
.     SOR_BACKWARD_SWEEP - backward SOR
.     SOR_SYMMETRIC_SWEEP - SSOR (symmetric SOR)
.     SOR_LOCAL_FORWARD_SWEEP - local forward SOR
.     SOR_LOCAL_BACKWARD_SWEEP - local backward SOR
.     SOR_LOCAL_SYMMETRIC_SWEEP - local SSOR
.     SOR_APPLY_UPPER, SOR_APPLY_LOWER - applies
         upper/lower triangular part of matrix to
         vector (with omega)
-     SOR_ZERO_INITIAL_GUESS - zero initial guess

   Notes:
   SOR_LOCAL_FORWARD_SWEEP, SOR_LOCAL_BACKWARD_SWEEP, and
   SOR_LOCAL_SYMMETRIC_SWEEP perform separate independent smoothings
   on each processor.

   Application programmers will not generally use MatSOR() directly,
   but instead will employ the KSP/PC interface.

   For BAIJ, SBAIJ, and AIJ matrices with Inodes this does a block SOR smoothing, otherwise it does a pointwise smoothing.

   Notes for Advanced Users:
   The flags are implemented as bitwise inclusive or operations.
   For example, use (SOR_ZERO_INITIAL_GUESS | SOR_SYMMETRIC_SWEEP)
   to specify a zero initial guess for SSOR.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Vectors x and b CANNOT be the same

   Developer Note: We should add block SOR support for AIJ matrices with block size set to greater than one and no inodes

   Level: developer

@*/
PetscErrorCode MatSOR(Mat mat,Vec b,PetscReal omega,MatSORType flag,PetscReal shift,PetscInt its,PetscInt lits,Vec x)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(b,VEC_CLASSID,2);
  PetscValidHeaderSpecific(x,VEC_CLASSID,8);
  PetscCheckSameComm(mat,1,b,2);
  PetscCheckSameComm(mat,1,x,8);
  /* SOR needs explicit access to the matrix entries, so only assembled, unfactored matrices with a type-specific kernel qualify */
  PetscCheck(mat->ops->sor,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->cmap->N == x->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec x: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->cmap->N,x->map->N);
  PetscCheck(mat->rmap->N == b->map->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: global dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->N,b->map->N);
  PetscCheck(mat->rmap->n == b->map->n,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Mat mat,Vec b: local dim %" PetscInt_FMT " %" PetscInt_FMT,mat->rmap->n,b->map->n);
  PetscCheck(its > 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires global its %" PetscInt_FMT " positive",its);
  PetscCheck(lits > 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Relaxation requires local its %" PetscInt_FMT " positive",lits);
  PetscCheck(b != x,PETSC_COMM_SELF,PETSC_ERR_ARG_IDN,"b and x vector cannot be the same");

  MatCheckPreallocated(mat,1);
  PetscCall(PetscLogEventBegin(MAT_SOR,mat,b,x,0));
  PetscCall((*mat->ops->sor)(mat,b,omega,flag,shift,its,lits,x));
  PetscCall(PetscLogEventEnd(MAT_SOR,mat,b,x,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)x));
  PetscFunctionReturn(0);
}
4078 
4079 /*
4080       Default matrix copy routine.
4081 */
4082 PetscErrorCode MatCopy_Basic(Mat A,Mat B,MatStructure str)
4083 {
4084   PetscInt          i,rstart = 0,rend = 0,nz;
4085   const PetscInt    *cwork;
4086   const PetscScalar *vwork;
4087 
4088   PetscFunctionBegin;
4089   if (B->assembled) {
4090     PetscCall(MatZeroEntries(B));
4091   }
4092   if (str == SAME_NONZERO_PATTERN) {
4093     PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
4094     for (i=rstart; i<rend; i++) {
4095       PetscCall(MatGetRow(A,i,&nz,&cwork,&vwork));
4096       PetscCall(MatSetValues(B,1,&i,nz,cwork,vwork,INSERT_VALUES));
4097       PetscCall(MatRestoreRow(A,i,&nz,&cwork,&vwork));
4098     }
4099   } else {
4100     PetscCall(MatAYPX(B,0.0,A,str));
4101   }
4102   PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
4103   PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
4104   PetscFunctionReturn(0);
4105 }
4106 
4107 /*@
4108    MatCopy - Copies a matrix to another matrix.
4109 
4110    Collective on Mat
4111 
4112    Input Parameters:
4113 +  A - the matrix
4114 -  str - SAME_NONZERO_PATTERN or DIFFERENT_NONZERO_PATTERN
4115 
4116    Output Parameter:
4117 .  B - where the copy is put
4118 
4119    Notes:
4120    If you use SAME_NONZERO_PATTERN then the two matrices must have the same nonzero pattern or the routine will crash.
4121 
4122    MatCopy() copies the matrix entries of a matrix to another existing
4123    matrix (after first zeroing the second matrix).  A related routine is
4124    MatConvert(), which first creates a new matrix and then copies the data.
4125 
4126    Level: intermediate
4127 
4128 .seealso: `MatConvert()`, `MatDuplicate()`
4129 @*/
4130 PetscErrorCode MatCopy(Mat A,Mat B,MatStructure str)
4131 {
4132   PetscInt       i;
4133 
4134   PetscFunctionBegin;
4135   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
4136   PetscValidHeaderSpecific(B,MAT_CLASSID,2);
4137   PetscValidType(A,1);
4138   PetscValidType(B,2);
4139   PetscCheckSameComm(A,1,B,2);
4140   MatCheckPreallocated(B,2);
4141   PetscCheck(A->assembled,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4142   PetscCheck(!A->factortype,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4143   PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim (%" PetscInt_FMT ",%" PetscInt_FMT ") (%" PetscInt_FMT ",%" PetscInt_FMT ")",A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N);
4144   MatCheckPreallocated(A,1);
4145   if (A == B) PetscFunctionReturn(0);
4146 
4147   PetscCall(PetscLogEventBegin(MAT_Copy,A,B,0,0));
4148   if (A->ops->copy) {
4149     PetscCall((*A->ops->copy)(A,B,str));
4150   } else { /* generic conversion */
4151     PetscCall(MatCopy_Basic(A,B,str));
4152   }
4153 
4154   B->stencil.dim = A->stencil.dim;
4155   B->stencil.noc = A->stencil.noc;
4156   for (i=0; i<=A->stencil.dim; i++) {
4157     B->stencil.dims[i]   = A->stencil.dims[i];
4158     B->stencil.starts[i] = A->stencil.starts[i];
4159   }
4160 
4161   PetscCall(PetscLogEventEnd(MAT_Copy,A,B,0,0));
4162   PetscCall(PetscObjectStateIncrease((PetscObject)B));
4163   PetscFunctionReturn(0);
4164 }
4165 
/*@C
   MatConvert - Converts a matrix to another matrix, either of the same
   or different type.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  newtype - new matrix type.  Use MATSAME to create a new matrix of the
   same type as the original matrix.
-  reuse - denotes if the destination matrix is to be created or reused.
   Use MAT_INPLACE_MATRIX for inplace conversion (that is when you want the input mat to be changed to contain the matrix in the new format), otherwise use
   MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX (can only be used after the first call was made with MAT_INITIAL_MATRIX, causes the matrix space in M to be reused).

   Output Parameter:
.  M - pointer to place new matrix

   Notes:
   MatConvert() first creates a new matrix and then copies the data from
   the first matrix.  A related routine is MatCopy(), which copies the matrix
   entries of one matrix to another already existing matrix context.

   Cannot be used to convert a sequential matrix to parallel or parallel to sequential,
   the MPI communicator of the generated matrix is always the same as the communicator
   of the input matrix.

   Level: intermediate

.seealso: `MatCopy()`, `MatDuplicate()`
@*/
PetscErrorCode MatConvert(Mat mat,MatType newtype,MatReuse reuse,Mat *M)
{
  PetscBool      sametype,issame,flg,issymmetric,ishermitian;
  char           convname[256],mtype[256];
  Mat            B;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(M,4);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);

  /* allow -matconvert_type on the command line to override the requested type;
     note newtype then points at the stack buffer mtype, valid only within this call */
  PetscCall(PetscOptionsGetString(((PetscObject)mat)->options,((PetscObject)mat)->prefix,"-matconvert_type",mtype,sizeof(mtype),&flg));
  if (flg) newtype = mtype;

  PetscCall(PetscObjectTypeCompare((PetscObject)mat,newtype,&sametype));
  PetscCall(PetscStrcmp(newtype,"same",&issame));
  PetscCheck(!(reuse == MAT_INPLACE_MATRIX) || !(mat != *M),PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires same input and output matrix");
  PetscCheck(!(reuse == MAT_REUSE_MATRIX) || !(mat == *M),PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_REUSE_MATRIX means reuse matrix in final argument, perhaps you mean MAT_INPLACE_MATRIX");

  /* in-place conversion to the matrix's own type is a no-op */
  if ((reuse == MAT_INPLACE_MATRIX) && (issame || sametype)) {
    PetscCall(PetscInfo(mat,"Early return for inplace %s %d %d\n",((PetscObject)mat)->type_name,sametype,issame));
    PetscFunctionReturn(0);
  }

  /* Cache Mat options because some converters use MatHeaderReplace, which
     would destroy the flags on mat before we can read them at the end */
  issymmetric = mat->symmetric;
  ishermitian = mat->hermitian;

  if ((sametype || issame) && (reuse==MAT_INITIAL_MATRIX) && mat->ops->duplicate) {
    /* same type and a fresh matrix requested: a duplicate is the conversion */
    PetscCall(PetscInfo(mat,"Calling duplicate for initial matrix %s %d %d\n",((PetscObject)mat)->type_name,sametype,issame));
    PetscCall((*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M));
  } else {
    PetscErrorCode (*conv)(Mat, MatType,MatReuse,Mat*)=NULL;
    const char     *prefix[3] = {"seq","mpi",""};
    PetscInt       i;
    /*
       Order of precedence:
       0) See if newtype is a superclass of the current matrix.
       1) See if a specialized converter is known to the current matrix.
       2) See if a specialized converter is known to the desired matrix class.
       3) See if a good general converter is registered for the desired class
          (as of 6/27/03 only MATMPIADJ falls into this category).
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 0) See if newtype is a superclass of the current matrix.
          i.e mat is mpiaij and newtype is aij */
    for (i=0; i<2; i++) {
      PetscCall(PetscStrncpy(convname,prefix[i],sizeof(convname)));
      PetscCall(PetscStrlcat(convname,newtype,sizeof(convname)));
      PetscCall(PetscStrcmp(convname,((PetscObject)mat)->type_name,&flg));
      PetscCall(PetscInfo(mat,"Check superclass %s %s -> %d\n",convname,((PetscObject)mat)->type_name,flg));
      if (flg) {
        /* a superclass match means a duplicate/copy is sufficient */
        if (reuse == MAT_INPLACE_MATRIX) {
          PetscCall(PetscInfo(mat,"Early return\n"));
          PetscFunctionReturn(0);
        } else if (reuse == MAT_INITIAL_MATRIX && mat->ops->duplicate) {
          PetscCall(PetscInfo(mat,"Calling MatDuplicate\n"));
          PetscCall((*mat->ops->duplicate)(mat,MAT_COPY_VALUES,M));
          PetscFunctionReturn(0);
        } else if (reuse == MAT_REUSE_MATRIX && mat->ops->copy) {
          PetscCall(PetscInfo(mat,"Calling MatCopy\n"));
          PetscCall(MatCopy(mat,*M,SAME_NONZERO_PATTERN));
          PetscFunctionReturn(0);
        }
      }
    }
    /* 1) See if a specialized converter is known to the current matrix and the desired class;
          looks up composed functions named MatConvert_<from>_<prefix><to>_C on mat */
    for (i=0; i<3; i++) {
      PetscCall(PetscStrncpy(convname,"MatConvert_",sizeof(convname)));
      PetscCall(PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname)));
      PetscCall(PetscStrlcat(convname,"_",sizeof(convname)));
      PetscCall(PetscStrlcat(convname,prefix[i],sizeof(convname)));
      PetscCall(PetscStrlcat(convname,issame ? ((PetscObject)mat)->type_name : newtype,sizeof(convname)));
      PetscCall(PetscStrlcat(convname,"_C",sizeof(convname)));
      PetscCall(PetscObjectQueryFunction((PetscObject)mat,convname,&conv));
      PetscCall(PetscInfo(mat,"Check specialized (1) %s (%s) -> %d\n",convname,((PetscObject)mat)->type_name,!!conv));
      if (conv) goto foundconv;
    }

    /* 2)  See if a specialized converter is known to the desired matrix class;
           requires instantiating a throwaway matrix B of the target type to query it */
    PetscCall(MatCreate(PetscObjectComm((PetscObject)mat),&B));
    PetscCall(MatSetSizes(B,mat->rmap->n,mat->cmap->n,mat->rmap->N,mat->cmap->N));
    PetscCall(MatSetType(B,newtype));
    for (i=0; i<3; i++) {
      PetscCall(PetscStrncpy(convname,"MatConvert_",sizeof(convname)));
      PetscCall(PetscStrlcat(convname,((PetscObject)mat)->type_name,sizeof(convname)));
      PetscCall(PetscStrlcat(convname,"_",sizeof(convname)));
      PetscCall(PetscStrlcat(convname,prefix[i],sizeof(convname)));
      PetscCall(PetscStrlcat(convname,newtype,sizeof(convname)));
      PetscCall(PetscStrlcat(convname,"_C",sizeof(convname)));
      PetscCall(PetscObjectQueryFunction((PetscObject)B,convname,&conv));
      PetscCall(PetscInfo(mat,"Check specialized (2) %s (%s) -> %d\n",convname,((PetscObject)B)->type_name,!!conv));
      if (conv) {
        PetscCall(MatDestroy(&B));
        goto foundconv;
      }
    }

    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    PetscCall(PetscInfo(mat,"Check convertfrom (%s) -> %d\n",((PetscObject)B)->type_name,!!conv));
    PetscCall(MatDestroy(&B));
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (mat->ops->convert) conv = mat->ops->convert;
    PetscCall(PetscInfo(mat,"Check general convert (%s) -> %d\n",((PetscObject)mat)->type_name,!!conv));
    if (conv) goto foundconv;

    /* 5) Use a really basic converter. */
    PetscCall(PetscInfo(mat,"Using MatConvert_Basic\n"));
    conv = MatConvert_Basic;

foundconv:
    /* run the chosen converter, then propagate layout mappings and stencil info */
    PetscCall(PetscLogEventBegin(MAT_Convert,mat,0,0,0));
    PetscCall((*conv)(mat,newtype,reuse,M));
    if (mat->rmap->mapping && mat->cmap->mapping && !(*M)->rmap->mapping && !(*M)->cmap->mapping) {
      /* the block sizes must be same if the mappings are copied over */
      (*M)->rmap->bs = mat->rmap->bs;
      (*M)->cmap->bs = mat->cmap->bs;
      PetscCall(PetscObjectReference((PetscObject)mat->rmap->mapping));
      PetscCall(PetscObjectReference((PetscObject)mat->cmap->mapping));
      (*M)->rmap->mapping = mat->rmap->mapping;
      (*M)->cmap->mapping = mat->cmap->mapping;
    }
    (*M)->stencil.dim = mat->stencil.dim;
    (*M)->stencil.noc = mat->stencil.noc;
    for (i=0; i<=mat->stencil.dim; i++) {
      (*M)->stencil.dims[i]   = mat->stencil.dims[i];
      (*M)->stencil.starts[i] = mat->stencil.starts[i];
    }
    PetscCall(PetscLogEventEnd(MAT_Convert,mat,0,0,0));
  }
  PetscCall(PetscObjectStateIncrease((PetscObject)*M));

  /* Copy Mat options (cached above in case mat's header was replaced) */
  if (issymmetric) {
    PetscCall(MatSetOption(*M,MAT_SYMMETRIC,PETSC_TRUE));
  }
  if (ishermitian) {
    PetscCall(MatSetOption(*M,MAT_HERMITIAN,PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}
4345 
4346 /*@C
4347    MatFactorGetSolverType - Returns name of the package providing the factorization routines
4348 
4349    Not Collective
4350 
4351    Input Parameter:
4352 .  mat - the matrix, must be a factored matrix
4353 
4354    Output Parameter:
4355 .   type - the string name of the package (do not free this string)
4356 
4357    Notes:
4358       In Fortran you pass in a empty string and the package name will be copied into it.
4359     (Make sure the string is long enough)
4360 
4361    Level: intermediate
4362 
4363 .seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
4364 @*/
4365 PetscErrorCode MatFactorGetSolverType(Mat mat, MatSolverType *type)
4366 {
4367   PetscErrorCode (*conv)(Mat,MatSolverType*);
4368 
4369   PetscFunctionBegin;
4370   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4371   PetscValidType(mat,1);
4372   PetscValidPointer(type,2);
4373   PetscCheck(mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
4374   PetscCall(PetscObjectQueryFunction((PetscObject)mat,"MatFactorGetSolverType_C",&conv));
4375   if (conv) PetscCall((*conv)(mat,type));
4376   else *type = MATSOLVERPETSC;
4377   PetscFunctionReturn(0);
4378 }
4379 
/* Linked-list node holding, for one matrix type within a solver package, the
   factor-creation routine registered for each factorization kind.
   (Note: "Specifc" is a historic typo kept for source compatibility.) */
typedef struct _MatSolverTypeForSpecifcType* MatSolverTypeForSpecifcType;
struct _MatSolverTypeForSpecifcType {
  MatType                        mtype;   /* matrix type these routines apply to, e.g. MATSEQAIJ */
  /* no entry for MAT_FACTOR_NONE */
  PetscErrorCode                 (*createfactor[MAT_FACTOR_NUM_TYPES-1])(Mat,MatFactorType,Mat*); /* indexed by ftype-1 */
  MatSolverTypeForSpecifcType next;      /* next matrix type registered for the same package */
};

/* Linked-list node describing one registered solver package (petsc, superlu, mumps, ...)
   together with the per-matrix-type handlers it provides. */
typedef struct _MatSolverTypeHolder* MatSolverTypeHolder;
struct _MatSolverTypeHolder {
  char                        *name;     /* package name, heap-allocated copy */
  MatSolverTypeForSpecifcType handlers;  /* head of per-matrix-type handler list */
  MatSolverTypeHolder         next;      /* next registered package */
};

/* Head of the process-wide registry built by MatSolverTypeRegister() and
   searched by MatSolverTypeGet(); freed by MatSolverTypeDestroy(). */
static MatSolverTypeHolder MatSolverTypeHolders = NULL;
4396 
/*@C
   MatSolverTypeRegister - Registers a MatSolverType that works for a particular matrix type

   Input Parameters:
+    package - name of the package, for example petsc or superlu
.    mtype - the matrix type that works with this package
.    ftype - the type of factorization supported by the package
-    createfactor - routine that will create the factored matrix ready to be used

    Level: intermediate

.seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`
@*/
PetscErrorCode MatSolverTypeRegister(MatSolverType package,MatType mtype,MatFactorType ftype,PetscErrorCode (*createfactor)(Mat,MatFactorType,Mat*))
{
  MatSolverTypeHolder         next = MatSolverTypeHolders,prev = NULL;
  PetscBool                   flg;
  MatSolverTypeForSpecifcType inext,iprev = NULL;

  PetscFunctionBegin;
  PetscCall(MatInitializePackage());
  /* empty registry: create the head node for this package with its first handler and return */
  if (!next) {
    PetscCall(PetscNew(&MatSolverTypeHolders));
    PetscCall(PetscStrallocpy(package,&MatSolverTypeHolders->name));
    PetscCall(PetscNew(&MatSolverTypeHolders->handlers));
    PetscCall(PetscStrallocpy(mtype,(char **)&MatSolverTypeHolders->handlers->mtype));
    /* createfactor[] has no slot for MAT_FACTOR_NONE, hence the -1 offset */
    MatSolverTypeHolders->handlers->createfactor[(int)ftype-1] = createfactor;
    PetscFunctionReturn(0);
  }
  /* search the registry for an existing node for this package (case-insensitive) */
  while (next) {
    PetscCall(PetscStrcasecmp(package,next->name,&flg));
    if (flg) {
      PetscCheck(next->handlers,PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatSolverTypeHolder is missing handlers");
      inext = next->handlers;
      /* look for an existing handler for this matrix type; overwrite its slot if found */
      while (inext) {
        PetscCall(PetscStrcasecmp(mtype,inext->mtype,&flg));
        if (flg) {
          inext->createfactor[(int)ftype-1] = createfactor;
          PetscFunctionReturn(0);
        }
        iprev = inext;
        inext = inext->next;
      }
      /* package known but matrix type not yet registered: append a new handler node */
      PetscCall(PetscNew(&iprev->next));
      PetscCall(PetscStrallocpy(mtype,(char **)&iprev->next->mtype));
      iprev->next->createfactor[(int)ftype-1] = createfactor;
      PetscFunctionReturn(0);
    }
    prev = next;
    next = next->next;
  }
  /* package not found: append a new package node (with its first handler) at the tail */
  PetscCall(PetscNew(&prev->next));
  PetscCall(PetscStrallocpy(package,&prev->next->name));
  PetscCall(PetscNew(&prev->next->handlers));
  PetscCall(PetscStrallocpy(mtype,(char **)&prev->next->handlers->mtype));
  prev->next->handlers->createfactor[(int)ftype-1] = createfactor;
  PetscFunctionReturn(0);
}
4455 
/*@C
   MatSolverTypeGet - Gets the function that creates the factor matrix if it exists

   Input Parameters:
+    type - name of the package, for example petsc or superlu; may be NULL to search all packages
.    ftype - the type of factorization supported by the type
-    mtype - the matrix type that works with this type

   Output Parameters:
+   foundtype - PETSC_TRUE if the type was registered
.   foundmtype - PETSC_TRUE if the type supports the requested mtype
-   createfactor - routine that will create the factored matrix ready to be used or NULL if not found

    Level: intermediate

.seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatSolverTypeRegister()`, `MatGetFactor()`
@*/
PetscErrorCode MatSolverTypeGet(MatSolverType type,MatType mtype,MatFactorType ftype,PetscBool *foundtype,PetscBool *foundmtype,PetscErrorCode (**createfactor)(Mat,MatFactorType,Mat*))
{
  MatSolverTypeHolder         next = MatSolverTypeHolders;
  PetscBool                   flg;
  MatSolverTypeForSpecifcType inext;

  PetscFunctionBegin;
  /* all outputs are optional; initialize the ones provided to "not found" */
  if (foundtype) *foundtype = PETSC_FALSE;
  if (foundmtype) *foundmtype = PETSC_FALSE;
  if (createfactor) *createfactor = NULL;

  if (type) {
    /* a specific package was requested: locate it by name (case-insensitive) */
    while (next) {
      PetscCall(PetscStrcasecmp(type,next->name,&flg));
      if (flg) {
        if (foundtype) *foundtype = PETSC_TRUE;
        inext = next->handlers;
        while (inext) {
          /* prefix match lets a handler registered for a base class (e.g. "aij") serve derived types */
          PetscCall(PetscStrbeginswith(mtype,inext->mtype,&flg));
          if (flg) {
            if (foundmtype) *foundmtype = PETSC_TRUE;
            /* NOTE: may still be NULL if this ftype slot was never registered */
            if (createfactor)  *createfactor  = inext->createfactor[(int)ftype-1];
            PetscFunctionReturn(0);
          }
          inext = inext->next;
        }
      }
      next = next->next;
    }
  } else {
    /* no package given: first pass requires an exact matrix-type match */
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrcmp(mtype,inext->mtype,&flg));
        if (flg && inext->createfactor[(int)ftype-1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype)   *foundmtype   = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype-1];
          PetscFunctionReturn(0);
        }
        inext = inext->next;
      }
      next = next->next;
    }
    /* try with base classes inext->mtype */
    next = MatSolverTypeHolders;
    while (next) {
      inext = next->handlers;
      while (inext) {
        PetscCall(PetscStrbeginswith(mtype,inext->mtype,&flg));
        if (flg && inext->createfactor[(int)ftype-1]) {
          if (foundtype) *foundtype = PETSC_TRUE;
          if (foundmtype)   *foundmtype   = PETSC_TRUE;
          if (createfactor) *createfactor = inext->createfactor[(int)ftype-1];
          PetscFunctionReturn(0);
        }
        inext = inext->next;
      }
      next = next->next;
    }
  }
  PetscFunctionReturn(0);
}
4536 
4537 PetscErrorCode MatSolverTypeDestroy(void)
4538 {
4539   MatSolverTypeHolder         next = MatSolverTypeHolders,prev;
4540   MatSolverTypeForSpecifcType inext,iprev;
4541 
4542   PetscFunctionBegin;
4543   while (next) {
4544     PetscCall(PetscFree(next->name));
4545     inext = next->handlers;
4546     while (inext) {
4547       PetscCall(PetscFree(inext->mtype));
4548       iprev = inext;
4549       inext = inext->next;
4550       PetscCall(PetscFree(iprev));
4551     }
4552     prev = next;
4553     next = next->next;
4554     PetscCall(PetscFree(prev));
4555   }
4556   MatSolverTypeHolders = NULL;
4557   PetscFunctionReturn(0);
4558 }
4559 
4560 /*@C
4561    MatFactorGetCanUseOrdering - Indicates if the factorization can use the ordering provided in MatLUFactorSymbolic(), MatCholeskyFactorSymbolic()
4562 
4563    Logically Collective on Mat
4564 
4565    Input Parameters:
4566 .  mat - the matrix
4567 
4568    Output Parameters:
4569 .  flg - PETSC_TRUE if uses the ordering
4570 
4571    Notes:
4572       Most internal PETSc factorizations use the ordering passed to the factorization routine but external
4573       packages do not, thus we want to skip generating the ordering when it is not needed or used.
4574 
4575    Level: developer
4576 
4577 .seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4578 @*/
4579 PetscErrorCode MatFactorGetCanUseOrdering(Mat mat, PetscBool *flg)
4580 {
4581   PetscFunctionBegin;
4582   *flg = mat->canuseordering;
4583   PetscFunctionReturn(0);
4584 }
4585 
4586 /*@C
4587    MatFactorGetPreferredOrdering - The preferred ordering for a particular matrix factor object
4588 
4589    Logically Collective on Mat
4590 
4591    Input Parameters:
4592 .  mat - the matrix
4593 
4594    Output Parameters:
4595 .  otype - the preferred type
4596 
4597    Level: developer
4598 
4599 .seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatGetFactor()`, `MatLUFactorSymbolic()`, `MatCholeskyFactorSymbolic()`
4600 @*/
4601 PetscErrorCode MatFactorGetPreferredOrdering(Mat mat, MatFactorType ftype, MatOrderingType *otype)
4602 {
4603   PetscFunctionBegin;
4604   *otype = mat->preferredordering[ftype];
4605   PetscCheck(*otype,PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatFactor did not have a preferred ordering");
4606   PetscFunctionReturn(0);
4607 }
4608 
/*@C
   MatGetFactor - Returns a matrix suitable to calls to MatXXFactorSymbolic()

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  type - name of solver type, for example, superlu, petsc (to use PETSc's default)
-  ftype - factor type, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, MAT_FACTOR_ILU,

   Output Parameters:
.  f - the factor matrix used with MatXXFactorSymbolic() calls

   Notes:
      Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
     such as pastix, superlu, mumps etc.

      PETSc must have been ./configure to use the external solver, using the option --download-package

   Developer Notes:
      This should actually be called MatCreateFactor() since it creates a new factor object

   Level: intermediate

.seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactorAvailable()`, `MatFactorGetCanUseOrdering()`, `MatSolverTypeRegister()`
@*/
PetscErrorCode MatGetFactor(Mat mat, MatSolverType type,MatFactorType ftype,Mat *f)
{
  PetscBool      foundtype,foundmtype;
  PetscErrorCode (*conv)(Mat,MatFactorType,Mat*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);

  /* look up the factor-creation routine in the solver registry; type may be NULL to accept any package */
  PetscCall(MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,&foundtype,&foundmtype,&conv));
  /* report the most specific failure: unknown package, then unsupported matrix type, then unsupported factorization */
  if (!foundtype) {
    if (type) {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate solver type %s for factorization type %s and matrix type %s. Perhaps you must ./configure with --download-%s",type,MatFactorTypes[ftype],((PetscObject)mat)->type_name,type);
    } else {
      SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"Could not locate a solver type for factorization type %s and matrix type %s.",MatFactorTypes[ftype],((PetscObject)mat)->type_name);
    }
  }
  PetscCheck(foundmtype,PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support matrix type %s",type,((PetscObject)mat)->type_name);
  PetscCheck(conv,PetscObjectComm((PetscObject)mat),PETSC_ERR_MISSING_FACTOR,"MatSolverType %s does not support factorization type %s for matrix type %s",type,MatFactorTypes[ftype],((PetscObject)mat)->type_name);

  /* create the (empty) factor matrix, ready for the symbolic factorization call */
  PetscCall((*conv)(mat,ftype,f));
  PetscFunctionReturn(0);
}
4661 
4662 /*@C
4663    MatGetFactorAvailable - Returns a a flag if matrix supports particular type and factor type
4664 
4665    Not Collective
4666 
4667    Input Parameters:
4668 +  mat - the matrix
4669 .  type - name of solver type, for example, superlu, petsc (to use PETSc's default)
4670 -  ftype - factor type, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ICC, MAT_FACTOR_ILU,
4671 
4672    Output Parameter:
4673 .    flg - PETSC_TRUE if the factorization is available
4674 
4675    Notes:
4676       Some PETSc matrix formats have alternative solvers available that are contained in alternative packages
4677      such as pastix, superlu, mumps etc.
4678 
4679       PETSc must have been ./configure to use the external solver, using the option --download-package
4680 
4681    Developer Notes:
4682       This should actually be called MatCreateFactorAvailable() since MatGetFactor() creates a new factor object
4683 
4684    Level: intermediate
4685 
4686 .seealso: `MatCopy()`, `MatDuplicate()`, `MatGetFactor()`, `MatSolverTypeRegister()`
4687 @*/
4688 PetscErrorCode MatGetFactorAvailable(Mat mat, MatSolverType type,MatFactorType ftype,PetscBool  *flg)
4689 {
4690   PetscErrorCode (*gconv)(Mat,MatFactorType,Mat*);
4691 
4692   PetscFunctionBegin;
4693   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4694   PetscValidType(mat,1);
4695   PetscValidBoolPointer(flg,4);
4696 
4697   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4698   MatCheckPreallocated(mat,1);
4699 
4700   PetscCall(MatSolverTypeGet(type,((PetscObject)mat)->type_name,ftype,NULL,NULL,&gconv));
4701   *flg = gconv ? PETSC_TRUE : PETSC_FALSE;
4702   PetscFunctionReturn(0);
4703 }
4704 
4705 /*@
4706    MatDuplicate - Duplicates a matrix including the non-zero structure.
4707 
4708    Collective on Mat
4709 
4710    Input Parameters:
4711 +  mat - the matrix
4712 -  op - One of MAT_DO_NOT_COPY_VALUES, MAT_COPY_VALUES, or MAT_SHARE_NONZERO_PATTERN.
4713         See the manual page for MatDuplicateOption for an explanation of these options.
4714 
4715    Output Parameter:
4716 .  M - pointer to place new matrix
4717 
4718    Level: intermediate
4719 
4720    Notes:
4721     You cannot change the nonzero pattern for the parent or child matrix if you use MAT_SHARE_NONZERO_PATTERN.
4722     May be called with an unassembled input Mat if MAT_DO_NOT_COPY_VALUES is used, in which case the output Mat is unassembled as well.
4723     When original mat is a product of matrix operation, e.g., an output of MatMatMult() or MatCreateSubMatrix(), only the simple matrix data structure of mat is duplicated and the internal data structures created for the reuse of previous matrix operations are not duplicated. User should not use MatDuplicate() to create new matrix M if M is intended to be reused as the product of matrix operation.
4724 
4725 .seealso: `MatCopy()`, `MatConvert()`, `MatDuplicateOption`
4726 @*/
4727 PetscErrorCode MatDuplicate(Mat mat,MatDuplicateOption op,Mat *M)
4728 {
4729   Mat            B;
4730   VecType        vtype;
4731   PetscInt       i;
4732   PetscObject    dm;
4733   void           (*viewf)(void);
4734 
4735   PetscFunctionBegin;
4736   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4737   PetscValidType(mat,1);
4738   PetscValidPointer(M,3);
4739   PetscCheck(op != MAT_COPY_VALUES || mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"MAT_COPY_VALUES not allowed for unassembled matrix");
4740   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4741   MatCheckPreallocated(mat,1);
4742 
4743   *M = NULL;
4744   PetscCheck(mat->ops->duplicate,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not written for matrix type %s",((PetscObject)mat)->type_name);
4745   PetscCall(PetscLogEventBegin(MAT_Convert,mat,0,0,0));
4746   PetscCall((*mat->ops->duplicate)(mat,op,M));
4747   PetscCall(PetscLogEventEnd(MAT_Convert,mat,0,0,0));
4748   B    = *M;
4749 
4750   PetscCall(MatGetOperation(mat,MATOP_VIEW,&viewf));
4751   if (viewf) {
4752     PetscCall(MatSetOperation(B,MATOP_VIEW,viewf));
4753   }
4754   PetscCall(MatGetVecType(mat,&vtype));
4755   PetscCall(MatSetVecType(B,vtype));
4756 
4757   B->stencil.dim = mat->stencil.dim;
4758   B->stencil.noc = mat->stencil.noc;
4759   for (i=0; i<=mat->stencil.dim; i++) {
4760     B->stencil.dims[i]   = mat->stencil.dims[i];
4761     B->stencil.starts[i] = mat->stencil.starts[i];
4762   }
4763 
4764   B->nooffproczerorows = mat->nooffproczerorows;
4765   B->nooffprocentries  = mat->nooffprocentries;
4766 
4767   PetscCall(PetscObjectQuery((PetscObject) mat, "__PETSc_dm", &dm));
4768   if (dm) {
4769     PetscCall(PetscObjectCompose((PetscObject) B, "__PETSc_dm", dm));
4770   }
4771   PetscCall(PetscObjectStateIncrease((PetscObject)B));
4772   PetscFunctionReturn(0);
4773 }
4774 
4775 /*@
4776    MatGetDiagonal - Gets the diagonal of a matrix.
4777 
4778    Logically Collective on Mat
4779 
4780    Input Parameters:
4781 +  mat - the matrix
4782 -  v - the vector for storing the diagonal
4783 
4784    Output Parameter:
4785 .  v - the diagonal of the matrix
4786 
4787    Level: intermediate
4788 
4789    Note:
4790    Currently only correct in parallel for square matrices.
4791 
4792 .seealso: `MatGetRow()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`
4793 @*/
4794 PetscErrorCode MatGetDiagonal(Mat mat,Vec v)
4795 {
4796   PetscFunctionBegin;
4797   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4798   PetscValidType(mat,1);
4799   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4800   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4801   PetscCheck(mat->ops->getdiagonal,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4802   MatCheckPreallocated(mat,1);
4803 
4804   PetscCall((*mat->ops->getdiagonal)(mat,v));
4805   PetscCall(PetscObjectStateIncrease((PetscObject)v));
4806   PetscFunctionReturn(0);
4807 }
4808 
4809 /*@C
4810    MatGetRowMin - Gets the minimum value (of the real part) of each
4811         row of the matrix
4812 
4813    Logically Collective on Mat
4814 
4815    Input Parameter:
4816 .  mat - the matrix
4817 
4818    Output Parameters:
4819 +  v - the vector for storing the maximums
4820 -  idx - the indices of the column found for each row (optional)
4821 
4822    Level: intermediate
4823 
4824    Notes:
4825     The result of this call are the same as if one converted the matrix to dense format
4826       and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4827 
4828     This code is only implemented for a couple of matrix formats.
4829 
4830 .seealso: `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`,
4831           `MatGetRowMax()`
4832 @*/
4833 PetscErrorCode MatGetRowMin(Mat mat,Vec v,PetscInt idx[])
4834 {
4835   PetscFunctionBegin;
4836   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4837   PetscValidType(mat,1);
4838   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4839   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4840 
4841   if (!mat->cmap->N) {
4842     PetscCall(VecSet(v,PETSC_MAX_REAL));
4843     if (idx) {
4844       PetscInt i,m = mat->rmap->n;
4845       for (i=0; i<m; i++) idx[i] = -1;
4846     }
4847   } else {
4848     PetscCheck(mat->ops->getrowmin,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4849     MatCheckPreallocated(mat,1);
4850   }
4851   PetscCall((*mat->ops->getrowmin)(mat,v,idx));
4852   PetscCall(PetscObjectStateIncrease((PetscObject)v));
4853   PetscFunctionReturn(0);
4854 }
4855 
4856 /*@C
4857    MatGetRowMinAbs - Gets the minimum value (in absolute value) of each
4858         row of the matrix
4859 
4860    Logically Collective on Mat
4861 
4862    Input Parameter:
4863 .  mat - the matrix
4864 
4865    Output Parameters:
4866 +  v - the vector for storing the minimums
4867 -  idx - the indices of the column found for each row (or NULL if not needed)
4868 
4869    Level: intermediate
4870 
4871    Notes:
4872     if a row is completely empty or has only 0.0 values then the idx[] value for that
4873     row is 0 (the first column).
4874 
4875     This code is only implemented for a couple of matrix formats.
4876 
4877 .seealso: `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
4878 @*/
4879 PetscErrorCode MatGetRowMinAbs(Mat mat,Vec v,PetscInt idx[])
4880 {
4881   PetscFunctionBegin;
4882   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4883   PetscValidType(mat,1);
4884   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4885   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4886   PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
4887 
4888   if (!mat->cmap->N) {
4889     PetscCall(VecSet(v,0.0));
4890     if (idx) {
4891       PetscInt i,m = mat->rmap->n;
4892       for (i=0; i<m; i++) idx[i] = -1;
4893     }
4894   } else {
4895     PetscCheck(mat->ops->getrowminabs,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4896     MatCheckPreallocated(mat,1);
4897     if (idx) PetscCall(PetscArrayzero(idx,mat->rmap->n));
4898     PetscCall((*mat->ops->getrowminabs)(mat,v,idx));
4899   }
4900   PetscCall(PetscObjectStateIncrease((PetscObject)v));
4901   PetscFunctionReturn(0);
4902 }
4903 
4904 /*@C
4905    MatGetRowMax - Gets the maximum value (of the real part) of each
4906         row of the matrix
4907 
4908    Logically Collective on Mat
4909 
4910    Input Parameter:
4911 .  mat - the matrix
4912 
4913    Output Parameters:
4914 +  v - the vector for storing the maximums
4915 -  idx - the indices of the column found for each row (optional)
4916 
4917    Level: intermediate
4918 
4919    Notes:
4920     The result of this call are the same as if one converted the matrix to dense format
4921       and found the minimum value in each row (i.e. the implicit zeros are counted as zeros).
4922 
4923     This code is only implemented for a couple of matrix formats.
4924 
4925 .seealso: `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMaxAbs()`, `MatGetRowMin()`
4926 @*/
4927 PetscErrorCode MatGetRowMax(Mat mat,Vec v,PetscInt idx[])
4928 {
4929   PetscFunctionBegin;
4930   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4931   PetscValidType(mat,1);
4932   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4933   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4934 
4935   if (!mat->cmap->N) {
4936     PetscCall(VecSet(v,PETSC_MIN_REAL));
4937     if (idx) {
4938       PetscInt i,m = mat->rmap->n;
4939       for (i=0; i<m; i++) idx[i] = -1;
4940     }
4941   } else {
4942     PetscCheck(mat->ops->getrowmax,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4943     MatCheckPreallocated(mat,1);
4944     PetscCall((*mat->ops->getrowmax)(mat,v,idx));
4945   }
4946   PetscCall(PetscObjectStateIncrease((PetscObject)v));
4947   PetscFunctionReturn(0);
4948 }
4949 
4950 /*@C
4951    MatGetRowMaxAbs - Gets the maximum value (in absolute value) of each
4952         row of the matrix
4953 
4954    Logically Collective on Mat
4955 
4956    Input Parameter:
4957 .  mat - the matrix
4958 
4959    Output Parameters:
4960 +  v - the vector for storing the maximums
4961 -  idx - the indices of the column found for each row (or NULL if not needed)
4962 
4963    Level: intermediate
4964 
4965    Notes:
4966     if a row is completely empty or has only 0.0 values then the idx[] value for that
4967     row is 0 (the first column).
4968 
4969     This code is only implemented for a couple of matrix formats.
4970 
4971 .seealso: `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`
4972 @*/
4973 PetscErrorCode MatGetRowMaxAbs(Mat mat,Vec v,PetscInt idx[])
4974 {
4975   PetscFunctionBegin;
4976   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
4977   PetscValidType(mat,1);
4978   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
4979   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
4980 
4981   if (!mat->cmap->N) {
4982     PetscCall(VecSet(v,0.0));
4983     if (idx) {
4984       PetscInt i,m = mat->rmap->n;
4985       for (i=0; i<m; i++) idx[i] = -1;
4986     }
4987   } else {
4988     PetscCheck(mat->ops->getrowmaxabs,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
4989     MatCheckPreallocated(mat,1);
4990     if (idx) PetscCall(PetscArrayzero(idx,mat->rmap->n));
4991     PetscCall((*mat->ops->getrowmaxabs)(mat,v,idx));
4992   }
4993   PetscCall(PetscObjectStateIncrease((PetscObject)v));
4994   PetscFunctionReturn(0);
4995 }
4996 
4997 /*@
4998    MatGetRowSum - Gets the sum of each row of the matrix
4999 
5000    Logically or Neighborhood Collective on Mat
5001 
5002    Input Parameters:
5003 .  mat - the matrix
5004 
5005    Output Parameter:
5006 .  v - the vector for storing the sum of rows
5007 
5008    Level: intermediate
5009 
5010    Notes:
5011     This code is slow since it is not currently specialized for different formats
5012 
5013 .seealso: `MatGetDiagonal()`, `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRowMax()`, `MatGetRowMin()`
5014 @*/
5015 PetscErrorCode MatGetRowSum(Mat mat, Vec v)
5016 {
5017   Vec            ones;
5018 
5019   PetscFunctionBegin;
5020   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5021   PetscValidType(mat,1);
5022   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
5023   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5024   MatCheckPreallocated(mat,1);
5025   PetscCall(MatCreateVecs(mat,&ones,NULL));
5026   PetscCall(VecSet(ones,1.));
5027   PetscCall(MatMult(mat,ones,v));
5028   PetscCall(VecDestroy(&ones));
5029   PetscFunctionReturn(0);
5030 }
5031 
5032 /*@
5033    MatTranspose - Computes an in-place or out-of-place transpose of a matrix.
5034 
5035    Collective on Mat
5036 
5037    Input Parameters:
5038 +  mat - the matrix to transpose
5039 -  reuse - either MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX, or MAT_INPLACE_MATRIX
5040 
5041    Output Parameter:
5042 .  B - the transpose
5043 
5044    Notes:
5045      If you use MAT_INPLACE_MATRIX then you must pass in &mat for B
5046 
5047      MAT_REUSE_MATRIX causes the B matrix from a previous call to this function with MAT_INITIAL_MATRIX to be used
5048 
5049      Consider using MatCreateTranspose() instead if you only need a matrix that behaves like the transpose, but don't need the storage to be changed.
5050 
5051    Level: intermediate
5052 
5053 .seealso: `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
5054 @*/
5055 PetscErrorCode MatTranspose(Mat mat,MatReuse reuse,Mat *B)
5056 {
5057   PetscFunctionBegin;
5058   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5059   PetscValidType(mat,1);
5060   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
5061   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
5062   PetscCheck(mat->ops->transpose,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
5063   PetscCheck(reuse != MAT_INPLACE_MATRIX || mat == *B,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"MAT_INPLACE_MATRIX requires last matrix to match first");
5064   PetscCheck(reuse != MAT_REUSE_MATRIX || mat != *B,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Perhaps you mean MAT_INPLACE_MATRIX");
5065   MatCheckPreallocated(mat,1);
5066 
5067   PetscCall(PetscLogEventBegin(MAT_Transpose,mat,0,0,0));
5068   PetscCall((*mat->ops->transpose)(mat,reuse,B));
5069   PetscCall(PetscLogEventEnd(MAT_Transpose,mat,0,0,0));
5070   if (B) PetscCall(PetscObjectStateIncrease((PetscObject)*B));
5071   PetscFunctionReturn(0);
5072 }
5073 
/*@
   MatIsTranspose - Test whether a matrix is another one's transpose,
        or its own, in which case it tests symmetry.

   Collective on Mat

   Input Parameters:
+  A - the matrix to test
.  B - the matrix to test against, this can equal the first parameter
-  tol - tolerance, differences between entries smaller than this are counted as zero

   Output Parameters:
.  flg - the result

   Notes:
   Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
   has a running time of the order of the number of nonzeros; the parallel
   test involves parallel copies of the block-offdiagonal parts of the matrix.

   Level: intermediate

.seealso: `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`
@*/
PetscErrorCode MatIsTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
{
  PetscErrorCode (*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
  PetscValidBoolPointer(flg,4);
  /* The test is implemented per matrix type; query it on both operands */
  PetscCall(PetscObjectQueryFunction((PetscObject)A,"MatIsTranspose_C",&f));
  PetscCall(PetscObjectQueryFunction((PetscObject)B,"MatIsTranspose_C",&g));
  *flg = PETSC_FALSE;
  if (f && g) {
    /* Both matrices must provide the same comparator for the result to be meaningful */
    PetscCheck(f == g,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for symmetry test");
    PetscCall((*f)(A,B,tol,flg));
  } else {
    MatType mattype;

    /* Name the matrix that lacks the implementation in the error message */
    PetscCall(MatGetType(f ? B : A,&mattype));
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type %s does not support checking for transpose",mattype);
  }
  PetscFunctionReturn(0);
}
5118 
/*@
   MatHermitianTranspose - Computes an in-place or out-of-place transpose of a matrix in complex conjugate.

   Collective on Mat

   Input Parameters:
+  mat - the matrix to transpose and complex conjugate
-  reuse - either MAT_INITIAL_MATRIX, MAT_REUSE_MATRIX, or MAT_INPLACE_MATRIX

   Output Parameter:
.  B - the Hermitian

   Note:
   In real arithmetic the Hermitian transpose coincides with the transpose, so the
   conjugation step is compiled out.

   Level: intermediate

.seealso: `MatTranspose()`, `MatMultTranspose()`, `MatMultTransposeAdd()`, `MatIsTranspose()`, `MatReuse`
@*/
PetscErrorCode MatHermitianTranspose(Mat mat,MatReuse reuse,Mat *B)
{
  PetscFunctionBegin;
  /* MatTranspose() handles all reuse modes and validates the arguments */
  PetscCall(MatTranspose(mat,reuse,B));
#if defined(PETSC_USE_COMPLEX)
  PetscCall(MatConjugate(*B));
#endif
  PetscFunctionReturn(0);
}
5144 
5145 /*@
5146    MatIsHermitianTranspose - Test whether a matrix is another one's Hermitian transpose,
5147 
5148    Collective on Mat
5149 
5150    Input Parameters:
5151 +  A - the matrix to test
5152 -  B - the matrix to test against, this can equal the first parameter
5153 
5154    Output Parameters:
5155 .  flg - the result
5156 
5157    Notes:
5158    Only available for SeqAIJ/MPIAIJ matrices. The sequential algorithm
5159    has a running time of the order of the number of nonzeros; the parallel
5160    test involves parallel copies of the block-offdiagonal parts of the matrix.
5161 
5162    Level: intermediate
5163 
5164 .seealso: `MatTranspose()`, `MatIsSymmetric()`, `MatIsHermitian()`, `MatIsTranspose()`
5165 @*/
5166 PetscErrorCode MatIsHermitianTranspose(Mat A,Mat B,PetscReal tol,PetscBool *flg)
5167 {
5168   PetscErrorCode (*f)(Mat,Mat,PetscReal,PetscBool*),(*g)(Mat,Mat,PetscReal,PetscBool*);
5169 
5170   PetscFunctionBegin;
5171   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
5172   PetscValidHeaderSpecific(B,MAT_CLASSID,2);
5173   PetscValidBoolPointer(flg,4);
5174   PetscCall(PetscObjectQueryFunction((PetscObject)A,"MatIsHermitianTranspose_C",&f));
5175   PetscCall(PetscObjectQueryFunction((PetscObject)B,"MatIsHermitianTranspose_C",&g));
5176   if (f && g) {
5177     PetscCheck(f != g,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_NOTSAMETYPE,"Matrices do not have the same comparator for Hermitian test");
5178     PetscCall((*f)(A,B,tol,flg));
5179   }
5180   PetscFunctionReturn(0);
5181 }
5182 
/*@
   MatPermute - Creates a new matrix with rows and columns permuted from the
   original.

   Collective on Mat

   Input Parameters:
+  mat - the matrix to permute
.  row - row permutation, each processor supplies only the permutation for its rows
-  col - column permutation, each processor supplies only the permutation for its columns

   Output Parameters:
.  B - the permuted matrix

   Level: advanced

   Note:
   The index sets map from row/col of permuted matrix to row/col of original matrix.
   The index sets should be on the same communicator as Mat and have the same local sizes.

   Developer Note:
     If you want to implement MatPermute for a matrix type, and your approach doesn't
     exploit the fact that row and col are permutations, consider implementing the
     more general MatCreateSubMatrix() instead.

.seealso: `MatGetOrdering()`, `ISAllGather()`

@*/
PetscErrorCode MatPermute(Mat mat,IS row,IS col,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(row,IS_CLASSID,2);
  PetscValidHeaderSpecific(col,IS_CLASSID,3);
  PetscValidPointer(B,4);
  PetscCheckSameComm(mat,1,row,2);
  /* the same IS may be passed for both rows and columns; avoid a redundant check */
  if (row != col) PetscCheckSameComm(row,2,col,3);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->permute || mat->ops->createsubmatrix,PETSC_COMM_SELF,PETSC_ERR_SUP,"MatPermute not available for Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  if (mat->ops->permute) {
    PetscCall((*mat->ops->permute)(mat,row,col,B));
    PetscCall(PetscObjectStateIncrease((PetscObject)*B));
  } else {
    /* no type-specific permute: fall back to general submatrix extraction */
    PetscCall(MatCreateSubMatrix(mat, row, col, MAT_INITIAL_MATRIX, B));
  }
  PetscFunctionReturn(0);
}
5234 
/*@
   MatEqual - Compares two matrices.

   Collective on Mat

   Input Parameters:
+  A - the first matrix
-  B - the second matrix

   Output Parameter:
.  flg - PETSC_TRUE if the matrices are equal; PETSC_FALSE otherwise.

   Note:
   If the matrices do not share a type-specific comparison routine, equality is
   checked by comparing the action of A and B on 10 vectors via MatMultEqual().

   Level: intermediate

@*/
PetscErrorCode MatEqual(Mat A,Mat B,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
  PetscValidType(A,1);
  PetscValidType(B,2);
  PetscValidBoolPointer(flg,3);
  PetscCheckSameComm(A,1,B,2);
  MatCheckPreallocated(A,1);
  MatCheckPreallocated(B,2);
  PetscCheck(A->assembled,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(B->assembled,PetscObjectComm((PetscObject)B),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(A->rmap->N == B->rmap->N && A->cmap->N == B->cmap->N,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_SIZ,"Mat A,Mat B: global dim %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT " %" PetscInt_FMT,A->rmap->N,B->rmap->N,A->cmap->N,B->cmap->N);
  if (A->ops->equal && A->ops->equal == B->ops->equal) {
    /* both matrices share the same type-specific comparison; use it directly */
    PetscCall((*A->ops->equal)(A,B,flg));
  } else {
    /* fall back to comparing matrix-vector products (see MatMultEqual()) */
    PetscCall(MatMultEqual(A,B,10,flg));
  }
  PetscFunctionReturn(0);
}
5271 
/*@
   MatDiagonalScale - Scales a matrix on the left and right by diagonal
   matrices that are stored as vectors.  Either of the two scaling
   matrices can be NULL.

   Collective on Mat

   Input Parameters:
+  mat - the matrix to be scaled
.  l - the left scaling vector (or NULL)
-  r - the right scaling vector (or NULL)

   Notes:
   MatDiagonalScale() computes A = LAR, where
   L = a diagonal matrix (stored as a vector), R = a diagonal matrix (stored as a vector)
   The L scales the rows of the matrix, the R scales the columns of the matrix.

   Level: intermediate

.seealso: `MatScale()`, `MatShift()`, `MatDiagonalSet()`
@*/
PetscErrorCode MatDiagonalScale(Mat mat,Vec l,Vec r)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (l) {PetscValidHeaderSpecific(l,VEC_CLASSID,2);PetscCheckSameComm(mat,1,l,2);}
  if (r) {PetscValidHeaderSpecific(r,VEC_CLASSID,3);PetscCheckSameComm(mat,1,r,3);}
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);
  if (!l && !r) PetscFunctionReturn(0); /* nothing to scale */

  PetscCheck(mat->ops->diagonalscale,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_Scale,mat,0,0,0));
  PetscCall((*mat->ops->diagonalscale)(mat,l,r));
  PetscCall(PetscLogEventEnd(MAT_Scale,mat,0,0,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* scaling with different left and right factors generally destroys symmetry */
  if (l != r && mat->symmetric) mat->symmetric = PETSC_FALSE;
  PetscFunctionReturn(0);
}
5313 
/*@
    MatScale - Scales all elements of a matrix by a given number.

    Logically Collective on Mat

    Input Parameters:
+   mat - the matrix to be scaled
-   a  - the scaling value

    Output Parameter:
.   mat - the scaled matrix

    Level: intermediate

.seealso: `MatDiagonalScale()`
@*/
PetscErrorCode MatScale(Mat mat,PetscScalar a)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* a == 1.0 is always allowed (no-op) even if the type provides no scale routine */
  PetscCheck(a == (PetscScalar)1.0 || mat->ops->scale,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscValidLogicalCollectiveScalar(mat,a,2);
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_Scale,mat,0,0,0));
  if (a != (PetscScalar)1.0) {
    /* skip the work (and the object state bump) for the identity scaling */
    PetscCall((*mat->ops->scale)(mat,a));
    PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  }
  PetscCall(PetscLogEventEnd(MAT_Scale,mat,0,0,0));
  PetscFunctionReturn(0);
}
5349 
/*@
   MatNorm - Calculates various norms of a matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
-  type - the type of norm, NORM_1, NORM_FROBENIUS, NORM_INFINITY

   Output Parameter:
.  nrm - the resulting norm

   Level: intermediate

@*/
PetscErrorCode MatNorm(Mat mat,NormType type,PetscReal *nrm)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidRealPointer(nrm,3);

  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->norm,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  /* delegate to the type-specific implementation; which NormTypes are supported depends on the type */
  PetscCall((*mat->ops->norm)(mat,type,nrm));
  PetscFunctionReturn(0);
}
5380 
/*
     This variable is used to prevent counting of MatAssemblyBegin() that
   are called from within a MatAssemblyEnd(). It is incremented on entry to
   MatAssemblyEnd() and decremented on exit, so only the outermost assembly
   is logged.
*/
static PetscInt MatAssemblyEnd_InUse = 0;
/*@
   MatAssemblyBegin - Begins assembling the matrix.  This routine should
   be called after completing all calls to MatSetValues().

   Collective on Mat

   Input Parameters:
+  mat - the matrix
-  type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY

   Notes:
   MatSetValues() generally caches the values.  The matrix is ready to
   use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
   Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
   in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
   using the matrix.

   ALL processes that share a matrix MUST call MatAssemblyBegin() and MatAssemblyEnd() the SAME NUMBER of times, and each time with the
   same flag of MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY for all processes. Thus you CANNOT locally change from ADD_VALUES to INSERT_VALUES, that is
   a global collective operation requiring all processes that share the matrix.

   Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine are compressed
   out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
   before MAT_FINAL_ASSEMBLY so the space is not compressed out.

   Level: beginner

.seealso: `MatAssemblyEnd()`, `MatSetValues()`, `MatAssembled()`
@*/
PetscErrorCode MatAssemblyBegin(Mat mat,MatAssemblyType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  MatCheckPreallocated(mat,1);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix.\nDid you forget to call MatSetUnfactored()?");
  if (mat->assembled) {
    /* re-assembly: remember the matrix was assembled before, but mark it unassembled
       until the matching MatAssemblyEnd() completes */
    mat->was_assembled = PETSC_TRUE;
    mat->assembled     = PETSC_FALSE;
  }

  if (!MatAssemblyEnd_InUse) {
    PetscCall(PetscLogEventBegin(MAT_AssemblyBegin,mat,0,0,0));
    if (mat->ops->assemblybegin) PetscCall((*mat->ops->assemblybegin)(mat,type));
    PetscCall(PetscLogEventEnd(MAT_AssemblyBegin,mat,0,0,0));
  } else if (mat->ops->assemblybegin) {
    /* called from within MatAssemblyEnd(): do the work but do not log it again */
    PetscCall((*mat->ops->assemblybegin)(mat,type));
  }
  PetscFunctionReturn(0);
}
5436 
/*@
   MatAssembled - Indicates if a matrix has been assembled and is ready for
     use; for example, in matrix-vector product.

   Not Collective

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  assembled - PETSC_TRUE or PETSC_FALSE

   Level: advanced

.seealso: `MatAssemblyEnd()`, `MatSetValues()`, `MatAssemblyBegin()`
@*/
PetscErrorCode MatAssembled(Mat mat,PetscBool *assembled)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidBoolPointer(assembled,2);
  /* simply report the flag maintained by MatAssemblyBegin()/MatAssemblyEnd() */
  *assembled = mat->assembled;
  PetscFunctionReturn(0);
}
5461 
/*@
   MatAssemblyEnd - Completes assembling the matrix.  This routine should
   be called after MatAssemblyBegin().

   Collective on Mat

   Input Parameters:
+  mat - the matrix
-  type - type of assembly, either MAT_FLUSH_ASSEMBLY or MAT_FINAL_ASSEMBLY

   Options Database Keys:
+  -mat_view ::ascii_info - Prints info on matrix at conclusion of MatAssemblyEnd()
.  -mat_view ::ascii_info_detail - Prints more detailed info
.  -mat_view - Prints matrix in ASCII format
.  -mat_view ::ascii_matlab - Prints matrix in Matlab format
.  -mat_view draw - PetscDraws nonzero structure of matrix, using MatView() and PetscDrawOpenX().
.  -display <name> - Sets display name (default is host)
.  -draw_pause <sec> - Sets number of seconds to pause after display
.  -mat_view socket - Sends matrix to socket, can be accessed from Matlab (See Users-Manual: ch_matlab)
.  -viewer_socket_machine <machine> - Machine to use for socket
.  -viewer_socket_port <port> - Port number to use for socket
-  -mat_view binary:filename[:append] - Save matrix to file in binary format

   Notes:
   MatSetValues() generally caches the values.  The matrix is ready to
   use only after MatAssemblyBegin() and MatAssemblyEnd() have been called.
   Use MAT_FLUSH_ASSEMBLY when switching between ADD_VALUES and INSERT_VALUES
   in MatSetValues(); use MAT_FINAL_ASSEMBLY for the final assembly before
   using the matrix.

   Space for preallocated nonzeros that is not filled by a call to MatSetValues() or a related routine are compressed
   out by assembly. If you intend to use that extra space on a subsequent assembly, be sure to insert explicit zeros
   before MAT_FINAL_ASSEMBLY so the space is not compressed out.

   Level: beginner

.seealso: `MatAssemblyBegin()`, `MatSetValues()`, `PetscDrawOpenX()`, `PetscDrawCreate()`, `MatView()`, `MatAssembled()`, `PetscViewerSocketOpen()`
@*/
PetscErrorCode MatAssemblyEnd(Mat mat,MatAssemblyType type)
{
  static PetscInt inassm = 0; /* nesting depth of MatAssemblyEnd() itself */
  PetscBool       flg    = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

  inassm++;
  MatAssemblyEnd_InUse++;
  if (MatAssemblyEnd_InUse == 1) { /* Do the logging only the first time through */
    PetscCall(PetscLogEventBegin(MAT_AssemblyEnd,mat,0,0,0));
    if (mat->ops->assemblyend) {
      PetscCall((*mat->ops->assemblyend)(mat,type));
    }
    PetscCall(PetscLogEventEnd(MAT_AssemblyEnd,mat,0,0,0));
  } else if (mat->ops->assemblyend) {
    PetscCall((*mat->ops->assemblyend)(mat,type));
  }

  /* Flush assembly is not a true assembly */
  if (type != MAT_FLUSH_ASSEMBLY) {
    mat->num_ass++;
    mat->assembled        = PETSC_TRUE;
    mat->ass_nonzerostate = mat->nonzerostate;
  }

  mat->insertmode = NOT_SET_VALUES;
  MatAssemblyEnd_InUse--;
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  /* assembly may have changed entries, so previously determined symmetry
     information is discarded unless the user promised it is eternal */
  if (!mat->symmetric_eternal) {
    mat->symmetric_set              = PETSC_FALSE;
    mat->hermitian_set              = PETSC_FALSE;
    mat->structurally_symmetric_set = PETSC_FALSE;
  }
  /* options-driven viewing/testing only at the outermost final assembly */
  if (inassm == 1 && type != MAT_FLUSH_ASSEMBLY) {
    PetscCall(MatViewFromOptions(mat,NULL,"-mat_view"));

    if (mat->checksymmetryonassembly) {
      PetscCall(MatIsSymmetric(mat,mat->checksymmetrytol,&flg));
      if (flg) {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is symmetric (tolerance %g)\n",(double)mat->checksymmetrytol));
      } else {
        PetscCall(PetscPrintf(PetscObjectComm((PetscObject)mat),"Matrix is not symmetric (tolerance %g)\n",(double)mat->checksymmetrytol));
      }
    }
    if (mat->nullsp && mat->checknullspaceonassembly) {
      PetscCall(MatNullSpaceTest(mat->nullsp,mat,NULL));
    }
  }
  inassm--;
  PetscFunctionReturn(0);
}
5554 
/*@
   MatSetOption - Sets a parameter option for a matrix. Some options
   may be specific to certain storage formats.  Some options
   determine how values will be inserted (or added). Sorted,
   row-oriented input will generally assemble the fastest. The default
   is row-oriented.

   Logically Collective on Mat for certain operations, such as MAT_SPD, not collective for MAT_ROW_ORIENTED, see MatOption

   Input Parameters:
+  mat - the matrix
.  option - the option, one of those listed below (and possibly others),
-  flg - turn the option on (PETSC_TRUE) or off (PETSC_FALSE)

  Options Describing Matrix Structure:
+    MAT_SPD - symmetric positive definite
.    MAT_SYMMETRIC - symmetric in terms of both structure and value
.    MAT_HERMITIAN - transpose is the complex conjugation
.    MAT_STRUCTURALLY_SYMMETRIC - symmetric nonzero structure
-    MAT_SYMMETRY_ETERNAL - if you would like the symmetry/Hermitian flag
                            you set to be kept with all future use of the matrix
                            including after MatAssemblyBegin/End() which could
                            potentially change the symmetry structure, i.e. you
                            KNOW the matrix will ALWAYS have the property you set.
                            Note that setting this flag alone implies nothing about whether the matrix is symmetric/Hermitian;
                            the relevant flags must be set independently.

   Options For Use with MatSetValues():
   Insert a logically dense subblock, which can be
.    MAT_ROW_ORIENTED - row-oriented (default)

   Note these options reflect the data you pass in with MatSetValues(); it has
   nothing to do with how the data is stored internally in the matrix
   data structure.

   When (re)assembling a matrix, we can restrict the input for
   efficiency/debugging purposes.  These options include
+    MAT_NEW_NONZERO_LOCATIONS - additional insertions will be allowed if they generate a new nonzero (slow)
.    MAT_FORCE_DIAGONAL_ENTRIES - forces diagonal entries to be allocated
.    MAT_IGNORE_OFF_PROC_ENTRIES - drops off-processor entries
.    MAT_NEW_NONZERO_LOCATION_ERR - generates an error for new matrix entry
.    MAT_USE_HASH_TABLE - uses a hash table to speed up matrix assembly
.    MAT_NO_OFF_PROC_ENTRIES - you know each process will only set values for its own rows, will generate an error if
        any process sets values for another process. This avoids all reductions in the MatAssembly routines and thus improves
        performance for very large process counts.
-    MAT_SUBSET_OFF_PROC_ENTRIES - you know that the first assembly after setting this flag will set a superset
        of the off-process entries required for all subsequent assemblies. This avoids a rendezvous step in the MatAssembly
        functions, instead sending only neighbor messages.

   Notes:
   Except for MAT_UNUSED_NONZERO_LOCATION_ERR and  MAT_ROW_ORIENTED all processes that share the matrix must pass the same value in flg!

   Some options are relevant only for particular matrix types and
   are thus ignored by others.  Other options are not supported by
   certain matrix types and will generate an error message if set.

   If using a Fortran 77 module to compute a matrix, one may need to
   use the column-oriented option (or convert to the row-oriented
   format).

   MAT_NEW_NONZERO_LOCATIONS set to PETSC_FALSE indicates that any add or insertion
   that would generate a new entry in the nonzero structure is instead
   ignored.  Thus, if memory has not already been allocated for this particular
   data, then the insertion is ignored. For dense matrices, in which
   the entire array is allocated, no entries are ever ignored.
   Set after the first MatAssemblyEnd(). If this option is set then the MatAssemblyBegin/End() processes has one less global reduction

   MAT_NEW_NONZERO_LOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
   that would generate a new entry in the nonzero structure instead produces
   an error. (Currently supported for AIJ and BAIJ formats only.) If this option is set then the MatAssemblyBegin/End() processes has one less global reduction

   MAT_NEW_NONZERO_ALLOCATION_ERR set to PETSC_TRUE indicates that any add or insertion
   that would generate a new entry that has not been preallocated will
   instead produce an error. (Currently supported for AIJ and BAIJ formats
   only.) This is a useful flag when debugging matrix memory preallocation.
   If this option is set then the MatAssemblyBegin/End() processes has one less global reduction

   MAT_IGNORE_OFF_PROC_ENTRIES set to PETSC_TRUE indicates entries destined for
   other processors should be dropped, rather than stashed.
   This is useful if you know that the "owning" processor is also
   always generating the correct matrix entries, so that PETSc need
   not transfer duplicate entries generated on another processor.

   MAT_USE_HASH_TABLE indicates that a hash table be used to improve the
   searches during matrix assembly. When this flag is set, the hash table
   is created during the first Matrix Assembly. This hash table is
   used the next time through, during MatSetValues()/MatSetValuesBlocked()
   to improve the searching of indices. MAT_NEW_NONZERO_LOCATIONS flag
   should be used with MAT_USE_HASH_TABLE flag. This option is currently
   supported by MATMPIBAIJ format only.

   MAT_KEEP_NONZERO_PATTERN indicates when MatZeroRows() is called the zeroed entries
   are kept in the nonzero structure

   MAT_IGNORE_ZERO_ENTRIES - for AIJ/IS matrices this will stop zero values from creating
   a zero location in the matrix

   MAT_USE_INODES - indicates using inode version of the code - works with AIJ matrix types

   MAT_NO_OFF_PROC_ZERO_ROWS - you know each process will only zero its own rows. This avoids all reductions in the
        zero row routines and thus improves performance for very large process counts.

   MAT_IGNORE_LOWER_TRIANGULAR - For SBAIJ matrices will ignore any insertions you make in the lower triangular
        part of the matrix (since they should match the upper triangular part).

   MAT_SORTED_FULL - each process provides exactly its local rows; all column indices for a given row are passed in a
                     single call to MatSetValues(), preallocation is perfect, row oriented, INSERT_VALUES is used. Common
                     with finite difference schemes with non-periodic boundary conditions.

   Level: intermediate

.seealso: `MatOption`, `Mat`

@*/
PetscErrorCode MatSetOption(Mat mat,MatOption op,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  /* negative op values are used for options that are not logically collective */
  if (op > 0) {
    PetscValidLogicalCollectiveEnum(mat,op,2);
    PetscValidLogicalCollectiveBool(mat,flg,3);
  }

  PetscCheck(((int) op) > MAT_OPTION_MIN && ((int) op) < MAT_OPTION_MAX,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Options %d is out of range",(int)op);

  switch (op) {
  /* the cases that PetscFunctionReturn(0) directly are purely generic and never
     forwarded to the type-specific setoption routine below */
  case MAT_FORCE_DIAGONAL_ENTRIES:
    mat->force_diagonals = flg;
    PetscFunctionReturn(0);
  case MAT_NO_OFF_PROC_ENTRIES:
    mat->nooffprocentries = flg;
    PetscFunctionReturn(0);
  case MAT_SUBSET_OFF_PROC_ENTRIES:
    mat->assembly_subset = flg;
    if (!mat->assembly_subset) { /* See the same logic in VecAssembly wrt VEC_SUBSET_OFF_PROC_ENTRIES */
#if !defined(PETSC_HAVE_MPIUNI)
      PetscCall(MatStashScatterDestroy_BTS(&mat->stash));
#endif
      mat->stash.first_assembly_done = PETSC_FALSE;
    }
    PetscFunctionReturn(0);
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    mat->nooffproczerorows = flg;
    PetscFunctionReturn(0);
  case MAT_SPD:
    mat->spd_set = PETSC_TRUE;
    mat->spd     = flg;
    if (flg) {
      /* SPD implies symmetric (and hence structurally symmetric) */
      mat->symmetric                  = PETSC_TRUE;
      mat->structurally_symmetric     = PETSC_TRUE;
      mat->symmetric_set              = PETSC_TRUE;
      mat->structurally_symmetric_set = PETSC_TRUE;
    }
    break;
  case MAT_SYMMETRIC:
    mat->symmetric = flg;
    if (flg) mat->structurally_symmetric = PETSC_TRUE;
    mat->symmetric_set              = PETSC_TRUE;
    mat->structurally_symmetric_set = flg;
#if !defined(PETSC_USE_COMPLEX)
    /* in real arithmetic, symmetric and Hermitian coincide */
    mat->hermitian     = flg;
    mat->hermitian_set = PETSC_TRUE;
#endif
    break;
  case MAT_HERMITIAN:
    mat->hermitian = flg;
    if (flg) mat->structurally_symmetric = PETSC_TRUE;
    mat->hermitian_set              = PETSC_TRUE;
    mat->structurally_symmetric_set = flg;
#if !defined(PETSC_USE_COMPLEX)
    /* in real arithmetic, Hermitian and symmetric coincide */
    mat->symmetric     = flg;
    mat->symmetric_set = PETSC_TRUE;
#endif
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    mat->structurally_symmetric     = flg;
    mat->structurally_symmetric_set = PETSC_TRUE;
    break;
  case MAT_SYMMETRY_ETERNAL:
    mat->symmetric_eternal = flg;
    break;
  case MAT_STRUCTURE_ONLY:
    mat->structure_only = flg;
    break;
  case MAT_SORTED_FULL:
    mat->sortedfull = flg;
    break;
  default:
    break;
  }
  /* forward the option to the type-specific handler, if any */
  if (mat->ops->setoption) {
    PetscCall((*mat->ops->setoption)(mat,op,flg));
  }
  PetscFunctionReturn(0);
}
5750 
/*@
   MatGetOption - Gets a parameter option that has been set for a matrix.

   Logically Collective on Mat for certain operations, such as MAT_SPD, not collective for MAT_ROW_ORIENTED, see MatOption

   Input Parameters:
+  mat - the matrix
-  option - the option, this only responds to certain options, check the code for which ones

   Output Parameter:
.  flg - turn the option on (PETSC_TRUE) or off (PETSC_FALSE)

    Notes:
    Can only be called after MatSetSizes() and MatSetType() have been set.

    For options not handled below, flg is left unchanged by this routine.

   Level: intermediate

.seealso: `MatOption`, `MatSetOption()`

@*/
PetscErrorCode MatGetOption(Mat mat,MatOption op,PetscBool *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);

  PetscCheck(((int) op) > MAT_OPTION_MIN && ((int) op) < MAT_OPTION_MAX,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Options %d is out of range",(int)op);
  PetscCheck(((PetscObject)mat)->type_name,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_TYPENOTSET,"Cannot get options until type and size have been set, see MatSetType() and MatSetSizes()");

  switch (op) {
  case MAT_NO_OFF_PROC_ENTRIES:
    *flg = mat->nooffprocentries;
    break;
  case MAT_NO_OFF_PROC_ZERO_ROWS:
    *flg = mat->nooffproczerorows;
    break;
  case MAT_SYMMETRIC:
    *flg = mat->symmetric;
    break;
  case MAT_HERMITIAN:
    *flg = mat->hermitian;
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    *flg = mat->structurally_symmetric;
    break;
  case MAT_SYMMETRY_ETERNAL:
    *flg = mat->symmetric_eternal;
    break;
  case MAT_SPD:
    *flg = mat->spd;
    break;
  default:
    /* unhandled options leave *flg untouched */
    break;
  }
  PetscFunctionReturn(0);
}
5807 
/*@
   MatZeroEntries - Zeros all entries of a matrix.  For sparse matrices
   this routine retains the old nonzero structure.

   Logically Collective on Mat

   Input Parameters:
.  mat - the matrix

   Level: intermediate

   Notes:
    If the matrix was not preallocated then a default, likely poor preallocation will be set in the matrix, so this should be called after the preallocation phase.
   See the Performance chapter of the users manual for information on preallocating matrices.

.seealso: `MatZeroRows()`
@*/
PetscErrorCode MatZeroEntries(Mat mat)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  /* pending unassembled insertions would be lost/inconsistent with the zeroing */
  PetscCheck(mat->insertmode == NOT_SET_VALUES,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for matrices where you have set values but not yet assembled");
  PetscCheck(mat->ops->zeroentries,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  PetscCall(PetscLogEventBegin(MAT_ZeroEntries,mat,0,0,0));
  PetscCall((*mat->ops->zeroentries)(mat));
  PetscCall(PetscLogEventEnd(MAT_ZeroEntries,mat,0,0,0));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
5841 
/*@
   MatZeroRowsColumns - Zeros all entries (except possibly the main diagonal)
   of a set of rows and columns of a matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  numRows - the number of rows to remove
.  rows - the global row indices
.  diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
.  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
-  b - optional vector of right hand side, that will be adjusted by provided solution

   Notes:
   This does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.

   The user can set a value in the diagonal entry (or for the AIJ and
   row formats can optionally remove the main diagonal entry from the
   nonzero structure as well, by passing 0.0 as the final argument).

   For the parallel case, all processes that share the matrix (i.e.,
   those in the communicator used for matrix creation) MUST call this
   routine, regardless of whether any rows being zeroed are owned by
   them.

   Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
   list only rows local to itself).

   The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.

   Level: intermediate

.seealso: `MatZeroRowsIS()`, `MatZeroRows()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
          `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
@*/
PetscErrorCode MatZeroRowsColumns(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* rows may be NULL when this process contributes no rows */
  if (numRows) PetscValidIntPointer(rows,3);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->zerorowscolumns,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  PetscCall((*mat->ops->zerorowscolumns)(mat,numRows,rows,diag,x,b));
  PetscCall(MatViewFromOptions(mat,NULL,"-mat_view"));
  /* State increase signals that matrix values changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
5894 
5895 /*@
5896    MatZeroRowsColumnsIS - Zeros all entries (except possibly the main diagonal)
5897    of a set of rows and columns of a matrix.
5898 
5899    Collective on Mat
5900 
5901    Input Parameters:
5902 +  mat - the matrix
5903 .  is - the rows to zero
5904 .  diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
5905 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
5906 -  b - optional vector of right hand side, that will be adjusted by provided solution
5907 
5908    Notes:
5909    This does not change the nonzero structure of the matrix, it merely zeros those entries in the matrix.
5910 
5911    The user can set a value in the diagonal entry (or for the AIJ and
5912    row formats can optionally remove the main diagonal entry from the
5913    nonzero structure as well, by passing 0.0 as the final argument).
5914 
5915    For the parallel case, all processes that share the matrix (i.e.,
5916    those in the communicator used for matrix creation) MUST call this
5917    routine, regardless of whether any rows being zeroed are owned by
5918    them.
5919 
5920    Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
5921    list only rows local to itself).
5922 
5923    The option MAT_NO_OFF_PROC_ZERO_ROWS does not apply to this routine.
5924 
5925    Level: intermediate
5926 
5927 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
5928           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRows()`, `MatZeroRowsColumnsStencil()`
5929 @*/
5930 PetscErrorCode MatZeroRowsColumnsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
5931 {
5932   PetscInt       numRows;
5933   const PetscInt *rows;
5934 
5935   PetscFunctionBegin;
5936   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
5937   PetscValidHeaderSpecific(is,IS_CLASSID,2);
5938   PetscValidType(mat,1);
5939   PetscValidType(is,2);
5940   PetscCall(ISGetLocalSize(is,&numRows));
5941   PetscCall(ISGetIndices(is,&rows));
5942   PetscCall(MatZeroRowsColumns(mat,numRows,rows,diag,x,b));
5943   PetscCall(ISRestoreIndices(is,&rows));
5944   PetscFunctionReturn(0);
5945 }
5946 
/*@
   MatZeroRows - Zeros all entries (except possibly the main diagonal)
   of a set of rows of a matrix.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  numRows - the number of rows to remove
.  rows - the global row indices
.  diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
.  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
-  b - optional vector of right hand side, that will be adjusted by provided solution

   Notes:
   For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
   but does not release memory.  For the dense and block diagonal
   formats this does not alter the nonzero structure.

   If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
   of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
   merely zeroed.

   The user can set a value in the diagonal entry (or for the AIJ and
   row formats can optionally remove the main diagonal entry from the
   nonzero structure as well, by passing 0.0 as the final argument).

   For the parallel case, all processes that share the matrix (i.e.,
   those in the communicator used for matrix creation) MUST call this
   routine, regardless of whether any rows being zeroed are owned by
   them.

   Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
   list only rows local to itself).

   You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
   owns that are to be zeroed. This saves a global synchronization in the implementation.

   Level: intermediate

.seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
          `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
@*/
PetscErrorCode MatZeroRows(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* rows may be NULL when this process contributes no rows */
  if (numRows) PetscValidIntPointer(rows,3);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->zerorows,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);

  PetscCall((*mat->ops->zerorows)(mat,numRows,rows,diag,x,b));
  PetscCall(MatViewFromOptions(mat,NULL,"-mat_view"));
  /* State increase signals that matrix values (and possibly structure) changed */
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
6006 
6007 /*@
6008    MatZeroRowsIS - Zeros all entries (except possibly the main diagonal)
6009    of a set of rows of a matrix.
6010 
6011    Collective on Mat
6012 
6013    Input Parameters:
6014 +  mat - the matrix
6015 .  is - index set of rows to remove (if NULL then no row is removed)
6016 .  diag - value put in all diagonals of eliminated rows
6017 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6018 -  b - optional vector of right hand side, that will be adjusted by provided solution
6019 
6020    Notes:
6021    For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
6022    but does not release memory.  For the dense and block diagonal
6023    formats this does not alter the nonzero structure.
6024 
6025    If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
6026    of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
6027    merely zeroed.
6028 
6029    The user can set a value in the diagonal entry (or for the AIJ and
6030    row formats can optionally remove the main diagonal entry from the
6031    nonzero structure as well, by passing 0.0 as the final argument).
6032 
6033    For the parallel case, all processes that share the matrix (i.e.,
6034    those in the communicator used for matrix creation) MUST call this
6035    routine, regardless of whether any rows being zeroed are owned by
6036    them.
6037 
6038    Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6039    list only rows local to itself).
6040 
6041    You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
6042    owns that are to be zeroed. This saves a global synchronization in the implementation.
6043 
6044    Level: intermediate
6045 
6046 .seealso: `MatZeroRows()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6047           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6048 @*/
6049 PetscErrorCode MatZeroRowsIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6050 {
6051   PetscInt       numRows = 0;
6052   const PetscInt *rows = NULL;
6053 
6054   PetscFunctionBegin;
6055   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6056   PetscValidType(mat,1);
6057   if (is) {
6058     PetscValidHeaderSpecific(is,IS_CLASSID,2);
6059     PetscCall(ISGetLocalSize(is,&numRows));
6060     PetscCall(ISGetIndices(is,&rows));
6061   }
6062   PetscCall(MatZeroRows(mat,numRows,rows,diag,x,b));
6063   if (is) {
6064     PetscCall(ISRestoreIndices(is,&rows));
6065   }
6066   PetscFunctionReturn(0);
6067 }
6068 
6069 /*@
6070    MatZeroRowsStencil - Zeros all entries (except possibly the main diagonal)
6071    of a set of rows of a matrix. These rows must be local to the process.
6072 
6073    Collective on Mat
6074 
6075    Input Parameters:
6076 +  mat - the matrix
6077 .  numRows - the number of rows to remove
6078 .  rows - the grid coordinates (and component number when dof > 1) for matrix rows
6079 .  diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6080 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6081 -  b - optional vector of right hand side, that will be adjusted by provided solution
6082 
6083    Notes:
6084    For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
6085    but does not release memory.  For the dense and block diagonal
6086    formats this does not alter the nonzero structure.
6087 
6088    If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
6089    of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
6090    merely zeroed.
6091 
6092    The user can set a value in the diagonal entry (or for the AIJ and
6093    row formats can optionally remove the main diagonal entry from the
6094    nonzero structure as well, by passing 0.0 as the final argument).
6095 
6096    For the parallel case, all processes that share the matrix (i.e.,
6097    those in the communicator used for matrix creation) MUST call this
6098    routine, regardless of whether any rows being zeroed are owned by
6099    them.
6100 
6101    Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6102    list only rows local to itself).
6103 
6104    The grid coordinates are across the entire grid, not just the local portion
6105 
6106    In Fortran idxm and idxn should be declared as
6107 $     MatStencil idxm(4,m)
6108    and the values inserted using
6109 $    idxm(MatStencil_i,1) = i
6110 $    idxm(MatStencil_j,1) = j
6111 $    idxm(MatStencil_k,1) = k
6112 $    idxm(MatStencil_c,1) = c
6113    etc
6114 
6115    For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6116    obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6117    etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6118    DM_BOUNDARY_PERIODIC boundary type.
6119 
6120    For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6121    a single value per point) you can skip filling those indices.
6122 
6123    Level: intermediate
6124 
6125 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsl()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6126           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6127 @*/
6128 PetscErrorCode MatZeroRowsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
6129 {
6130   PetscInt       dim     = mat->stencil.dim;
6131   PetscInt       sdim    = dim - (1 - (PetscInt) mat->stencil.noc);
6132   PetscInt       *dims   = mat->stencil.dims+1;
6133   PetscInt       *starts = mat->stencil.starts;
6134   PetscInt       *dxm    = (PetscInt*) rows;
6135   PetscInt       *jdxm, i, j, tmp, numNewRows = 0;
6136 
6137   PetscFunctionBegin;
6138   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6139   PetscValidType(mat,1);
6140   if (numRows) PetscValidPointer(rows,3);
6141 
6142   PetscCall(PetscMalloc1(numRows, &jdxm));
6143   for (i = 0; i < numRows; ++i) {
6144     /* Skip unused dimensions (they are ordered k, j, i, c) */
6145     for (j = 0; j < 3-sdim; ++j) dxm++;
6146     /* Local index in X dir */
6147     tmp = *dxm++ - starts[0];
6148     /* Loop over remaining dimensions */
6149     for (j = 0; j < dim-1; ++j) {
6150       /* If nonlocal, set index to be negative */
6151       if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6152       /* Update local index */
6153       else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
6154     }
6155     /* Skip component slot if necessary */
6156     if (mat->stencil.noc) dxm++;
6157     /* Local row number */
6158     if (tmp >= 0) {
6159       jdxm[numNewRows++] = tmp;
6160     }
6161   }
6162   PetscCall(MatZeroRowsLocal(mat,numNewRows,jdxm,diag,x,b));
6163   PetscCall(PetscFree(jdxm));
6164   PetscFunctionReturn(0);
6165 }
6166 
6167 /*@
6168    MatZeroRowsColumnsStencil - Zeros all row and column entries (except possibly the main diagonal)
6169    of a set of rows and columns of a matrix.
6170 
6171    Collective on Mat
6172 
6173    Input Parameters:
6174 +  mat - the matrix
6175 .  numRows - the number of rows/columns to remove
6176 .  rows - the grid coordinates (and component number when dof > 1) for matrix rows
6177 .  diag - value put in all diagonals of eliminated rows (0.0 will even eliminate diagonal entry)
6178 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6179 -  b - optional vector of right hand side, that will be adjusted by provided solution
6180 
6181    Notes:
6182    For the AIJ and BAIJ matrix formats this removes the old nonzero structure,
6183    but does not release memory.  For the dense and block diagonal
6184    formats this does not alter the nonzero structure.
6185 
6186    If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
6187    of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
6188    merely zeroed.
6189 
6190    The user can set a value in the diagonal entry (or for the AIJ and
6191    row formats can optionally remove the main diagonal entry from the
6192    nonzero structure as well, by passing 0.0 as the final argument).
6193 
6194    For the parallel case, all processes that share the matrix (i.e.,
6195    those in the communicator used for matrix creation) MUST call this
6196    routine, regardless of whether any rows being zeroed are owned by
6197    them.
6198 
6199    Each processor can indicate any rows in the entire matrix to be zeroed (i.e. each process does NOT have to
6200    list only rows local to itself, but the row/column numbers are given in local numbering).
6201 
6202    The grid coordinates are across the entire grid, not just the local portion
6203 
6204    In Fortran idxm and idxn should be declared as
6205 $     MatStencil idxm(4,m)
6206    and the values inserted using
6207 $    idxm(MatStencil_i,1) = i
6208 $    idxm(MatStencil_j,1) = j
6209 $    idxm(MatStencil_k,1) = k
6210 $    idxm(MatStencil_c,1) = c
6211    etc
6212 
6213    For periodic boundary conditions use negative indices for values to the left (below 0; that are to be
6214    obtained by wrapping values from right edge). For values to the right of the last entry using that index plus one
6215    etc to obtain values that obtained by wrapping the values from the left edge. This does not work for anything but the
6216    DM_BOUNDARY_PERIODIC boundary type.
6217 
6218    For indices that don't mean anything for your case (like the k index when working in 2d) or the c index when you have
6219    a single value per point) you can skip filling those indices.
6220 
6221    Level: intermediate
6222 
6223 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6224           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRows()`
6225 @*/
6226 PetscErrorCode MatZeroRowsColumnsStencil(Mat mat,PetscInt numRows,const MatStencil rows[],PetscScalar diag,Vec x,Vec b)
6227 {
6228   PetscInt       dim     = mat->stencil.dim;
6229   PetscInt       sdim    = dim - (1 - (PetscInt) mat->stencil.noc);
6230   PetscInt       *dims   = mat->stencil.dims+1;
6231   PetscInt       *starts = mat->stencil.starts;
6232   PetscInt       *dxm    = (PetscInt*) rows;
6233   PetscInt       *jdxm, i, j, tmp, numNewRows = 0;
6234 
6235   PetscFunctionBegin;
6236   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6237   PetscValidType(mat,1);
6238   if (numRows) PetscValidPointer(rows,3);
6239 
6240   PetscCall(PetscMalloc1(numRows, &jdxm));
6241   for (i = 0; i < numRows; ++i) {
6242     /* Skip unused dimensions (they are ordered k, j, i, c) */
6243     for (j = 0; j < 3-sdim; ++j) dxm++;
6244     /* Local index in X dir */
6245     tmp = *dxm++ - starts[0];
6246     /* Loop over remaining dimensions */
6247     for (j = 0; j < dim-1; ++j) {
6248       /* If nonlocal, set index to be negative */
6249       if ((*dxm++ - starts[j+1]) < 0 || tmp < 0) tmp = PETSC_MIN_INT;
6250       /* Update local index */
6251       else tmp = tmp*dims[j] + *(dxm-1) - starts[j+1];
6252     }
6253     /* Skip component slot if necessary */
6254     if (mat->stencil.noc) dxm++;
6255     /* Local row number */
6256     if (tmp >= 0) {
6257       jdxm[numNewRows++] = tmp;
6258     }
6259   }
6260   PetscCall(MatZeroRowsColumnsLocal(mat,numNewRows,jdxm,diag,x,b));
6261   PetscCall(PetscFree(jdxm));
6262   PetscFunctionReturn(0);
6263 }
6264 
6265 /*@C
6266    MatZeroRowsLocal - Zeros all entries (except possibly the main diagonal)
6267    of a set of rows of a matrix; using local numbering of rows.
6268 
6269    Collective on Mat
6270 
6271    Input Parameters:
6272 +  mat - the matrix
6273 .  numRows - the number of rows to remove
6274 .  rows - the local row indices
6275 .  diag - value put in all diagonals of eliminated rows
6276 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6277 -  b - optional vector of right hand side, that will be adjusted by provided solution
6278 
6279    Notes:
6280    Before calling MatZeroRowsLocal(), the user must first set the
6281    local-to-global mapping by calling MatSetLocalToGlobalMapping().
6282 
6283    For the AIJ matrix formats this removes the old nonzero structure,
6284    but does not release memory.  For the dense and block diagonal
6285    formats this does not alter the nonzero structure.
6286 
6287    If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
6288    of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
6289    merely zeroed.
6290 
6291    The user can set a value in the diagonal entry (or for the AIJ and
6292    row formats can optionally remove the main diagonal entry from the
6293    nonzero structure as well, by passing 0.0 as the final argument).
6294 
6295    You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
6296    owns that are to be zeroed. This saves a global synchronization in the implementation.
6297 
6298    Level: intermediate
6299 
6300 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRows()`, `MatSetOption()`,
6301           `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6302 @*/
6303 PetscErrorCode MatZeroRowsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6304 {
6305   PetscFunctionBegin;
6306   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6307   PetscValidType(mat,1);
6308   if (numRows) PetscValidIntPointer(rows,3);
6309   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6310   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6311   MatCheckPreallocated(mat,1);
6312 
6313   if (mat->ops->zerorowslocal) {
6314     PetscCall((*mat->ops->zerorowslocal)(mat,numRows,rows,diag,x,b));
6315   } else {
6316     IS             is, newis;
6317     const PetscInt *newRows;
6318 
6319     PetscCheck(mat->rmap->mapping,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6320     PetscCall(ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is));
6321     PetscCall(ISLocalToGlobalMappingApplyIS(mat->rmap->mapping,is,&newis));
6322     PetscCall(ISGetIndices(newis,&newRows));
6323     PetscCall((*mat->ops->zerorows)(mat,numRows,newRows,diag,x,b));
6324     PetscCall(ISRestoreIndices(newis,&newRows));
6325     PetscCall(ISDestroy(&newis));
6326     PetscCall(ISDestroy(&is));
6327   }
6328   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6329   PetscFunctionReturn(0);
6330 }
6331 
/*@
   MatZeroRowsLocalIS - Zeros all entries (except possibly the main diagonal)
   of a set of rows of a matrix; using local numbering of rows.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  is - index set of rows to remove
.  diag - value put in all diagonals of eliminated rows
.  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
-  b - optional vector of right hand side, that will be adjusted by provided solution

   Notes:
   Before calling MatZeroRowsLocalIS(), the user must first set the
   local-to-global mapping by calling MatSetLocalToGlobalMapping().

   For the AIJ matrix formats this removes the old nonzero structure,
   but does not release memory.  For the dense and block diagonal
   formats this does not alter the nonzero structure.

   If the option MatSetOption(mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE) the nonzero structure
   of the matrix is not changed (even for AIJ and BAIJ matrices) the values are
   merely zeroed.

   The user can set a value in the diagonal entry (or for the AIJ and
   row formats can optionally remove the main diagonal entry from the
   nonzero structure as well, by passing 0.0 as the final argument).

   You can call MatSetOption(mat,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) if each process indicates only rows it
   owns that are to be zeroed. This saves a global synchronization in the implementation.

   Level: intermediate

.seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRows()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
          `MatZeroRowsColumnsLocal()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
@*/
PetscErrorCode MatZeroRowsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
{
  PetscInt       numRows;
  const PetscInt *rows;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);

  /* Unpack the index set and forward to the array-based local interface */
  PetscCall(ISGetLocalSize(is,&numRows));
  PetscCall(ISGetIndices(is,&rows));
  PetscCall(MatZeroRowsLocal(mat,numRows,rows,diag,x,b));
  PetscCall(ISRestoreIndices(is,&rows));
  PetscFunctionReturn(0);
}
6388 
6389 /*@
6390    MatZeroRowsColumnsLocal - Zeros all entries (except possibly the main diagonal)
6391    of a set of rows and columns of a matrix; using local numbering of rows.
6392 
6393    Collective on Mat
6394 
6395    Input Parameters:
6396 +  mat - the matrix
6397 .  numRows - the number of rows to remove
6398 .  rows - the global row indices
6399 .  diag - value put in all diagonals of eliminated rows
6400 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6401 -  b - optional vector of right hand side, that will be adjusted by provided solution
6402 
6403    Notes:
6404    Before calling MatZeroRowsColumnsLocal(), the user must first set the
6405    local-to-global mapping by calling MatSetLocalToGlobalMapping().
6406 
6407    The user can set a value in the diagonal entry (or for the AIJ and
6408    row formats can optionally remove the main diagonal entry from the
6409    nonzero structure as well, by passing 0.0 as the final argument).
6410 
6411    Level: intermediate
6412 
6413 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6414           `MatZeroRows()`, `MatZeroRowsColumnsLocalIS()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6415 @*/
6416 PetscErrorCode MatZeroRowsColumnsLocal(Mat mat,PetscInt numRows,const PetscInt rows[],PetscScalar diag,Vec x,Vec b)
6417 {
6418   IS             is, newis;
6419   const PetscInt *newRows;
6420 
6421   PetscFunctionBegin;
6422   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6423   PetscValidType(mat,1);
6424   if (numRows) PetscValidIntPointer(rows,3);
6425   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6426   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6427   MatCheckPreallocated(mat,1);
6428 
6429   PetscCheck(mat->cmap->mapping,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Need to provide local to global mapping to matrix first");
6430   PetscCall(ISCreateGeneral(PETSC_COMM_SELF,numRows,rows,PETSC_COPY_VALUES,&is));
6431   PetscCall(ISLocalToGlobalMappingApplyIS(mat->cmap->mapping,is,&newis));
6432   PetscCall(ISGetIndices(newis,&newRows));
6433   PetscCall((*mat->ops->zerorowscolumns)(mat,numRows,newRows,diag,x,b));
6434   PetscCall(ISRestoreIndices(newis,&newRows));
6435   PetscCall(ISDestroy(&newis));
6436   PetscCall(ISDestroy(&is));
6437   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
6438   PetscFunctionReturn(0);
6439 }
6440 
6441 /*@
6442    MatZeroRowsColumnsLocalIS - Zeros all entries (except possibly the main diagonal)
6443    of a set of rows and columns of a matrix; using local numbering of rows.
6444 
6445    Collective on Mat
6446 
6447    Input Parameters:
6448 +  mat - the matrix
6449 .  is - index set of rows to remove
6450 .  diag - value put in all diagonals of eliminated rows
6451 .  x - optional vector of solutions for zeroed rows (other entries in vector are not used)
6452 -  b - optional vector of right hand side, that will be adjusted by provided solution
6453 
6454    Notes:
6455    Before calling MatZeroRowsColumnsLocalIS(), the user must first set the
6456    local-to-global mapping by calling MatSetLocalToGlobalMapping().
6457 
6458    The user can set a value in the diagonal entry (or for the AIJ and
6459    row formats can optionally remove the main diagonal entry from the
6460    nonzero structure as well, by passing 0.0 as the final argument).
6461 
6462    Level: intermediate
6463 
6464 .seealso: `MatZeroRowsIS()`, `MatZeroRowsColumns()`, `MatZeroRowsLocalIS()`, `MatZeroRowsStencil()`, `MatZeroEntries()`, `MatZeroRowsLocal()`, `MatSetOption()`,
6465           `MatZeroRowsColumnsLocal()`, `MatZeroRows()`, `MatZeroRowsColumnsIS()`, `MatZeroRowsColumnsStencil()`
6466 @*/
6467 PetscErrorCode MatZeroRowsColumnsLocalIS(Mat mat,IS is,PetscScalar diag,Vec x,Vec b)
6468 {
6469   PetscInt       numRows;
6470   const PetscInt *rows;
6471 
6472   PetscFunctionBegin;
6473   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6474   PetscValidType(mat,1);
6475   PetscValidHeaderSpecific(is,IS_CLASSID,2);
6476   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6477   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6478   MatCheckPreallocated(mat,1);
6479 
6480   PetscCall(ISGetLocalSize(is,&numRows));
6481   PetscCall(ISGetIndices(is,&rows));
6482   PetscCall(MatZeroRowsColumnsLocal(mat,numRows,rows,diag,x,b));
6483   PetscCall(ISRestoreIndices(is,&rows));
6484   PetscFunctionReturn(0);
6485 }
6486 
6487 /*@C
6488    MatGetSize - Returns the numbers of rows and columns in a matrix.
6489 
6490    Not Collective
6491 
6492    Input Parameter:
6493 .  mat - the matrix
6494 
6495    Output Parameters:
6496 +  m - the number of global rows
6497 -  n - the number of global columns
6498 
6499    Note: both output parameters can be NULL on input.
6500 
6501    Level: beginner
6502 
6503 .seealso: `MatGetLocalSize()`
6504 @*/
6505 PetscErrorCode MatGetSize(Mat mat,PetscInt *m,PetscInt *n)
6506 {
6507   PetscFunctionBegin;
6508   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6509   if (m) *m = mat->rmap->N;
6510   if (n) *n = mat->cmap->N;
6511   PetscFunctionReturn(0);
6512 }
6513 
/*@C
   MatGetLocalSize - Returns the number of local rows and local columns
   of a matrix, that is the local size of the left and right vectors as returned by MatCreateVecs().

   Not Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  m - the number of local rows
-  n - the number of local columns

   Note: both output parameters can be NULL on input.

   Level: beginner

.seealso: `MatGetSize()`
@*/
PetscErrorCode MatGetLocalSize(Mat mat,PetscInt *m,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  /* Validate both output pointers before writing either, so nothing is
     modified when an argument is invalid */
  if (m) PetscValidIntPointer(m,2);
  if (n) PetscValidIntPointer(n,3);
  if (m) *m = mat->rmap->n;
  if (n) *n = mat->cmap->n;
  PetscFunctionReturn(0);
}
6543 
/*@C
   MatGetOwnershipRangeColumn - Returns the range of matrix columns associated with rows of a vector one multiplies by that owned by
   this processor. (The columns of the "diagonal block")

   Not Collective, unless matrix has not been allocated, then collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  m - the global index of the first local column
-  n - one more than the global index of the last local column

   Notes:
    both output parameters can be NULL on input.

   Level: developer

.seealso: `MatGetOwnershipRange()`, `MatGetOwnershipRanges()`, `MatGetOwnershipRangesColumn()`

@*/
PetscErrorCode MatGetOwnershipRangeColumn(Mat mat,PetscInt *m,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (m) PetscValidIntPointer(m,2);
  if (n) PetscValidIntPointer(n,3);
  /* ensure the layout exists before reading it (this is why the call can be collective when unallocated) */
  MatCheckPreallocated(mat,1);
  if (m) *m = mat->cmap->rstart;
  if (n) *n = mat->cmap->rend;
  PetscFunctionReturn(0);
}
6577 
/*@C
   MatGetOwnershipRange - Returns the range of matrix rows owned by
   this processor, assuming that the matrix is laid out with the first
   n1 rows on the first processor, the next n2 rows on the second, etc.
   For certain parallel layouts this range may not be well defined.

   Not Collective

   Input Parameter:
.  mat - the matrix

   Output Parameters:
+  m - the global index of the first local row
-  n - one more than the global index of the last local row

   Note: Both output parameters can be NULL on input.
$  This function requires that the matrix be preallocated. If you have not preallocated, consider using
$    PetscSplitOwnership(MPI_Comm comm, PetscInt *n, PetscInt *N)
$  and then MPI_Scan() to calculate prefix sums of the local sizes.

   Level: beginner

.seealso: `MatGetOwnershipRanges()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`, `PetscSplitOwnership()`, `PetscSplitOwnershipBlock()`

@*/
PetscErrorCode MatGetOwnershipRange(Mat mat,PetscInt *m,PetscInt *n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (m) PetscValidIntPointer(m,2);
  if (n) PetscValidIntPointer(n,3);
  /* the row layout must be set up before rstart/rend can be read */
  MatCheckPreallocated(mat,1);
  if (m) *m = mat->rmap->rstart;
  if (n) *n = mat->rmap->rend;
  PetscFunctionReturn(0);
}
6615 
/*@C
   MatGetOwnershipRanges - Returns the range of matrix rows owned by
   each process

   Not Collective, unless matrix has not been allocated, then collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  ranges - start of each processors portion plus one more than the total length at the end

   Level: beginner

.seealso: `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRangesColumn()`

@*/
PetscErrorCode MatGetOwnershipRanges(Mat mat,const PetscInt **ranges)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* ensure the row layout exists before querying its ranges */
  MatCheckPreallocated(mat,1);
  PetscCall(PetscLayoutGetRanges(mat->rmap,ranges));
  PetscFunctionReturn(0);
}
6642 
/*@C
   MatGetOwnershipRangesColumn - Returns the range of matrix columns associated with rows of a vector one multiplies by that owned by
   this processor. (The columns of the "diagonal blocks" for each process)

   Not Collective, unless matrix has not been allocated, then collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  ranges - start of each processors portion plus one more than the total length at the end

   Level: beginner

.seealso: `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatGetOwnershipRanges()`

@*/
PetscErrorCode MatGetOwnershipRangesColumn(Mat mat,const PetscInt **ranges)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* ensure the column layout exists before querying its ranges */
  MatCheckPreallocated(mat,1);
  PetscCall(PetscLayoutGetRanges(mat->cmap,ranges));
  PetscFunctionReturn(0);
}
6669 
6670 /*@C
6671    MatGetOwnershipIS - Get row and column ownership as index sets
6672 
6673    Not Collective
6674 
6675    Input Parameter:
6676 .  A - matrix
6677 
6678    Output Parameters:
6679 +  rows - rows in which this process owns elements
6680 -  cols - columns in which this process owns elements
6681 
6682    Level: intermediate
6683 
6684 .seealso: `MatGetOwnershipRange()`, `MatGetOwnershipRangeColumn()`, `MatSetValues()`, `MATELEMENTAL`, `MATSCALAPACK`
6685 @*/
6686 PetscErrorCode MatGetOwnershipIS(Mat A,IS *rows,IS *cols)
6687 {
6688   PetscErrorCode (*f)(Mat,IS*,IS*);
6689 
6690   PetscFunctionBegin;
6691   MatCheckPreallocated(A,1);
6692   PetscCall(PetscObjectQueryFunction((PetscObject)A,"MatGetOwnershipIS_C",&f));
6693   if (f) {
6694     PetscCall((*f)(A,rows,cols));
6695   } else {   /* Create a standard row-based partition, each process is responsible for ALL columns in their row block */
6696     if (rows) PetscCall(ISCreateStride(PETSC_COMM_SELF,A->rmap->n,A->rmap->rstart,1,rows));
6697     if (cols) PetscCall(ISCreateStride(PETSC_COMM_SELF,A->cmap->N,0,1,cols));
6698   }
6699   PetscFunctionReturn(0);
6700 }
6701 
/*@C
   MatILUFactorSymbolic - Performs symbolic ILU factorization of a matrix.
   Uses levels of fill only, not drop tolerance. Use MatLUFactorNumeric()
   to complete the factorization.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  row - row permutation
.  column - column permutation
-  info - structure containing
$      levels - number of levels of fill.
$      expected fill - as ratio of original fill.
$      1 or 0 - indicating force fill on diagonal (improves robustness for matrices
                missing diagonal entries)

   Output Parameter:
.  fact - new matrix that has been symbolically factored

   Notes:
    See Users-Manual: ch_mat for additional information about choosing the fill factor for better efficiency.

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatLUFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
          `MatGetOrdering()`, `MatFactorInfo`

    Note: this uses the definition of level of fill as in Y. Saad, 2003

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

   References:
.  * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
@*/
PetscErrorCode MatILUFactorSymbolic(Mat fact,Mat mat,IS row,IS col,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,3);
  if (col) PetscValidHeaderSpecific(col,IS_CLASSID,4);
  PetscValidPointer(info,5);
  PetscValidPointer(fact,1);
  /* reject invalid factorization parameters before dispatching to the implementation */
  PetscCheck(info->levels >= 0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels of fill negative %" PetscInt_FMT,(PetscInt)info->levels);
  PetscCheck(info->fill >= 1.0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill less than 1.0 %g",(double)info->fill);
  /* the symbolic phase lives on the factor matrix; report the solver package in the error if it is missing */
  if (!fact->ops->ilufactorsymbolic) {
    MatSolverType stype;
    PetscCall(MatFactorGetSolverType(fact,&stype));
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic ILU using solver type %s",((PetscObject)mat)->type_name,stype);
  }
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,2);

  /* skip event logging when the symbolic factorization is trivial */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ILUFactorSymbolic,mat,row,col,0));
  PetscCall((fact->ops->ilufactorsymbolic)(fact,mat,row,col,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ILUFactorSymbolic,mat,row,col,0));
  PetscFunctionReturn(0);
}
6767 
/*@C
   MatICCFactorSymbolic - Performs symbolic incomplete
   Cholesky factorization for a symmetric matrix.  Use
   MatCholeskyFactorNumeric() to complete the factorization.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  perm - row and column permutation
-  info - structure containing
$      levels - number of levels of fill.
$      expected fill - as ratio of original fill.

   Output Parameter:
.  fact - the factored matrix

   Notes:
   Most users should employ the KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().

   Level: developer

.seealso: `MatCholeskyFactorNumeric()`, `MatCholeskyFactor()`, `MatFactorInfo`

    Note: this uses the definition of level of fill as in Y. Saad, 2003

    Developer Note: fortran interface is not autogenerated as the f90
    interface definition cannot be generated correctly [due to MatFactorInfo]

   References:
.  * - Y. Saad, Iterative methods for sparse linear systems Philadelphia: Society for Industrial and Applied Mathematics, 2003
@*/
PetscErrorCode MatICCFactorSymbolic(Mat fact,Mat mat,IS perm,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,2);
  PetscValidType(mat,2);
  if (perm) PetscValidHeaderSpecific(perm,IS_CLASSID,3);
  PetscValidPointer(info,4);
  PetscValidPointer(fact,1);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  /* reject invalid factorization parameters before dispatching to the implementation */
  PetscCheck(info->levels >= 0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Levels negative %" PetscInt_FMT,(PetscInt) info->levels);
  PetscCheck(info->fill >= 1.0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Expected fill less than 1.0 %g",(double)info->fill);
  /* the symbolic phase lives on the factor matrix; report the solver package in the error if it is missing */
  if (!(fact)->ops->iccfactorsymbolic) {
    MatSolverType stype;
    PetscCall(MatFactorGetSolverType(fact,&stype));
    SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s symbolic ICC using solver type %s",((PetscObject)mat)->type_name,stype);
  }
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  MatCheckPreallocated(mat,2);

  /* skip event logging when the symbolic factorization is trivial */
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventBegin(MAT_ICCFactorSymbolic,mat,perm,0,0));
  PetscCall((fact->ops->iccfactorsymbolic)(fact,mat,perm,info));
  if (!fact->trivialsymbolic) PetscCall(PetscLogEventEnd(MAT_ICCFactorSymbolic,mat,perm,0,0));
  PetscFunctionReturn(0);
}
6826 
/*@C
   MatCreateSubMatrices - Extracts several submatrices from a matrix. If submat
   points to an array of valid matrices, they may be reused to store the new
   submatrices.

   Collective on Mat

   Input Parameters:
+  mat - the matrix
.  n   - the number of submatrices to be extracted (on this processor, may be zero)
.  irow, icol - index sets of rows and columns to extract
-  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.  submat - the array of submatrices

   Notes:
   MatCreateSubMatrices() can extract ONLY sequential submatrices
   (from both sequential and parallel matrices). Use MatCreateSubMatrix()
   to extract a parallel submatrix.

   Some matrix types place restrictions on the row and column
   indices, such as that they be sorted or that they be equal to each other.

   The index sets may not have duplicate entries.

   When extracting submatrices from a parallel matrix, each processor can
   form a different submatrix by setting the rows and columns of its
   individual index sets according to the local submatrix desired.

   When finished using the submatrices, the user should destroy
   them with MatDestroySubMatrices().

   MAT_REUSE_MATRIX can only be used when the nonzero structure of the
   original matrix has not changed from that last call to MatCreateSubMatrices().

   This routine creates the matrices in submat; you should NOT create them before
   calling it. It also allocates the array of matrix pointers submat.

   For BAIJ matrices the index sets must respect the block structure, that is if they
   request one row/column in a block, they must request all rows/columns that are in
   that block. For example, if the block size is 2 you cannot request just row 0 and
   column 0.

   Fortran Note:
   The Fortran interface is slightly different from that given below; it
   requires one to pass in  as submat a Mat (integer) array of size at least n+1.

   Level: advanced

.seealso: `MatDestroySubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
@*/
PetscErrorCode MatCreateSubMatrices(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscInt       i;
  PetscBool      eq;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (n) {
    PetscValidPointer(irow,3);
    for (i=0; i<n; i++) PetscValidHeaderSpecific(irow[i],IS_CLASSID,3);
    PetscValidPointer(icol,4);
    for (i=0; i<n; i++) PetscValidHeaderSpecific(icol[i],IS_CLASSID,4);
  }
  PetscValidPointer(submat,6);
  /* when reusing, every entry of the existing submat array must be a valid Mat */
  if (n && scall == MAT_REUSE_MATRIX) {
    PetscValidPointer(*submat,6);
    for (i=0; i<n; i++) PetscValidHeaderSpecific((*submat)[i],MAT_CLASSID,6);
  }
  PetscCheck(mat->ops->createsubmatrices,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);
  PetscCall(PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0));
  PetscCall((*mat->ops->createsubmatrices)(mat,n,irow,icol,scall,submat));
  PetscCall(PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0));
  for (i=0; i<n; i++) {
    (*submat)[i]->factortype = MAT_FACTOR_NONE;  /* in case in place factorization was previously done on submatrix */
    /* identical row and column index sets allow symmetry-type options to carry over */
    PetscCall(ISEqualUnsorted(irow[i],icol[i],&eq));
    if (eq) {
      PetscCall(MatPropagateSymmetryOptions(mat,(*submat)[i]));
    }
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
    /* keep submatrices on the CPU when the parent matrix is bound there and propagates that binding */
    if (mat->boundtocpu && mat->bindingpropagates) {
      PetscCall(MatBindToCPU((*submat)[i],PETSC_TRUE));
      PetscCall(MatSetBindingPropagates((*submat)[i],PETSC_TRUE));
    }
#endif
  }
  PetscFunctionReturn(0);
}
6920 
6921 /*@C
6922    MatCreateSubMatricesMPI - Extracts MPI submatrices across a sub communicator of mat (by pairs of IS that may live on subcomms).
6923 
6924    Collective on Mat
6925 
6926    Input Parameters:
6927 +  mat - the matrix
6928 .  n   - the number of submatrixes to be extracted
6929 .  irow, icol - index sets of rows and columns to extract
6930 -  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
6931 
6932    Output Parameter:
6933 .  submat - the array of submatrices
6934 
6935    Level: advanced
6936 
6937 .seealso: `MatCreateSubMatrices()`, `MatCreateSubMatrix()`, `MatGetRow()`, `MatGetDiagonal()`, `MatReuse`
6938 @*/
6939 PetscErrorCode MatCreateSubMatricesMPI(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
6940 {
6941   PetscInt       i;
6942   PetscBool      eq;
6943 
6944   PetscFunctionBegin;
6945   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
6946   PetscValidType(mat,1);
6947   if (n) {
6948     PetscValidPointer(irow,3);
6949     PetscValidHeaderSpecific(*irow,IS_CLASSID,3);
6950     PetscValidPointer(icol,4);
6951     PetscValidHeaderSpecific(*icol,IS_CLASSID,4);
6952   }
6953   PetscValidPointer(submat,6);
6954   if (n && scall == MAT_REUSE_MATRIX) {
6955     PetscValidPointer(*submat,6);
6956     PetscValidHeaderSpecific(**submat,MAT_CLASSID,6);
6957   }
6958   PetscCheck(mat->ops->createsubmatricesmpi,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
6959   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
6960   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
6961   MatCheckPreallocated(mat,1);
6962 
6963   PetscCall(PetscLogEventBegin(MAT_CreateSubMats,mat,0,0,0));
6964   PetscCall((*mat->ops->createsubmatricesmpi)(mat,n,irow,icol,scall,submat));
6965   PetscCall(PetscLogEventEnd(MAT_CreateSubMats,mat,0,0,0));
6966   for (i=0; i<n; i++) {
6967     PetscCall(ISEqualUnsorted(irow[i],icol[i],&eq));
6968     if (eq) {
6969       PetscCall(MatPropagateSymmetryOptions(mat,(*submat)[i]));
6970     }
6971   }
6972   PetscFunctionReturn(0);
6973 }
6974 
6975 /*@C
6976    MatDestroyMatrices - Destroys an array of matrices.
6977 
6978    Collective on Mat
6979 
6980    Input Parameters:
6981 +  n - the number of local matrices
6982 -  mat - the matrices (note that this is a pointer to the array of matrices)
6983 
6984    Level: advanced
6985 
6986     Notes:
6987     Frees not only the matrices, but also the array that contains the matrices
6988            In Fortran will not free the array.
6989 
6990 .seealso: `MatCreateSubMatrices()` `MatDestroySubMatrices()`
6991 @*/
6992 PetscErrorCode MatDestroyMatrices(PetscInt n,Mat *mat[])
6993 {
6994   PetscInt       i;
6995 
6996   PetscFunctionBegin;
6997   if (!*mat) PetscFunctionReturn(0);
6998   PetscCheck(n >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %" PetscInt_FMT,n);
6999   PetscValidPointer(mat,2);
7000 
7001   for (i=0; i<n; i++) {
7002     PetscCall(MatDestroy(&(*mat)[i]));
7003   }
7004 
7005   /* memory is allocated even if n = 0 */
7006   PetscCall(PetscFree(*mat));
7007   PetscFunctionReturn(0);
7008 }
7009 
7010 /*@C
7011    MatDestroySubMatrices - Destroys a set of matrices obtained with MatCreateSubMatrices().
7012 
7013    Collective on Mat
7014 
7015    Input Parameters:
7016 +  n - the number of local matrices
7017 -  mat - the matrices (note that this is a pointer to the array of matrices, just to match the calling
7018                        sequence of MatCreateSubMatrices())
7019 
7020    Level: advanced
7021 
7022     Notes:
7023     Frees not only the matrices, but also the array that contains the matrices
7024            In Fortran will not free the array.
7025 
7026 .seealso: `MatCreateSubMatrices()`
7027 @*/
7028 PetscErrorCode MatDestroySubMatrices(PetscInt n,Mat *mat[])
7029 {
7030   Mat            mat0;
7031 
7032   PetscFunctionBegin;
7033   if (!*mat) PetscFunctionReturn(0);
7034   /* mat[] is an array of length n+1, see MatCreateSubMatrices_xxx() */
7035   PetscCheck(n >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to destroy negative number of matrices %" PetscInt_FMT,n);
7036   PetscValidPointer(mat,2);
7037 
7038   mat0 = (*mat)[0];
7039   if (mat0 && mat0->ops->destroysubmatrices) {
7040     PetscCall((mat0->ops->destroysubmatrices)(n,mat));
7041   } else {
7042     PetscCall(MatDestroyMatrices(n,mat));
7043   }
7044   PetscFunctionReturn(0);
7045 }
7046 
/*@C
   MatGetSeqNonzeroStructure - Extracts the nonzero structure from a matrix and stores it, in its entirety, on each process

   Collective on Mat

   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  matstruct - the sequential matrix with the nonzero structure of mat

  Level: intermediate

.seealso: `MatDestroySeqNonzeroStructure()`, `MatCreateSubMatrices()`, `MatDestroyMatrices()`
@*/
PetscErrorCode MatGetSeqNonzeroStructure(Mat mat,Mat *matstruct)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidPointer(matstruct,2);

  PetscValidType(mat,1);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  MatCheckPreallocated(mat,1);

  /* delegate to the type-specific implementation; not every matrix type provides one */
  PetscCheck(mat->ops->getseqnonzerostructure,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Not for matrix type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_GetSeqNonzeroStructure,mat,0,0,0));
  PetscCall((*mat->ops->getseqnonzerostructure)(mat,matstruct));
  PetscCall(PetscLogEventEnd(MAT_GetSeqNonzeroStructure,mat,0,0,0));
  PetscFunctionReturn(0);
}
7078 
/*@C
   MatDestroySeqNonzeroStructure - Destroys matrix obtained with MatGetSeqNonzeroStructure().

   Collective on Mat

   Input Parameter:
.  mat - the matrix (note that this is a pointer to the matrix, just to match the calling
                       sequence of MatGetSeqNonzeroStructure())

   Level: advanced

    Note:
    Destroys the single sequential matrix created by MatGetSeqNonzeroStructure()

.seealso: `MatGetSeqNonzeroStructure()`
@*/
PetscErrorCode MatDestroySeqNonzeroStructure(Mat *mat)
{
  PetscFunctionBegin;
  PetscValidPointer(mat,1);
  PetscCall(MatDestroy(mat));
  PetscFunctionReturn(0);
}
7102 
7103 /*@
7104    MatIncreaseOverlap - Given a set of submatrices indicated by index sets,
7105    replaces the index sets by larger ones that represent submatrices with
7106    additional overlap.
7107 
7108    Collective on Mat
7109 
7110    Input Parameters:
7111 +  mat - the matrix
7112 .  n   - the number of index sets
7113 .  is  - the array of index sets (these index sets will changed during the call)
7114 -  ov  - the additional overlap requested
7115 
7116    Options Database:
7117 .  -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7118 
7119    Level: developer
7120 
7121    Developer Note:
7122    Any implementation must preserve block sizes. That is: if the row block size and the column block size of mat are equal to bs, then the output index sets must be compatible with bs.
7123 
7124 .seealso: `MatCreateSubMatrices()`
7125 @*/
7126 PetscErrorCode MatIncreaseOverlap(Mat mat,PetscInt n,IS is[],PetscInt ov)
7127 {
7128   PetscInt       i,bs,cbs;
7129 
7130   PetscFunctionBegin;
7131   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7132   PetscValidType(mat,1);
7133   PetscValidLogicalCollectiveInt(mat,n,2);
7134   PetscCheck(n >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more domains, you have %" PetscInt_FMT,n);
7135   if (n) {
7136     PetscValidPointer(is,3);
7137     for (i = 0; i < n; i++) PetscValidHeaderSpecific(is[i],IS_CLASSID,3);
7138   }
7139   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
7140   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
7141   MatCheckPreallocated(mat,1);
7142 
7143   if (!ov || !n) PetscFunctionReturn(0);
7144   PetscCheck(mat->ops->increaseoverlap,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
7145   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0));
7146   PetscCall((*mat->ops->increaseoverlap)(mat,n,is,ov));
7147   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0));
7148   PetscCall(MatGetBlockSizes(mat,&bs,&cbs));
7149   if (bs == cbs) {
7150     for (i=0; i<n; i++) {
7151       PetscCall(ISSetBlockSize(is[i],bs));
7152     }
7153   }
7154   PetscFunctionReturn(0);
7155 }
7156 
7157 PetscErrorCode MatIncreaseOverlapSplit_Single(Mat,IS*,PetscInt);
7158 
7159 /*@
7160    MatIncreaseOverlapSplit - Given a set of submatrices indicated by index sets across
7161    a sub communicator, replaces the index sets by larger ones that represent submatrices with
7162    additional overlap.
7163 
7164    Collective on Mat
7165 
7166    Input Parameters:
7167 +  mat - the matrix
7168 .  n   - the number of index sets
7169 .  is  - the array of index sets (these index sets will changed during the call)
7170 -  ov  - the additional overlap requested
7171 
7172    Options Database:
7173 .  -mat_increase_overlap_scalable - use a scalable algorithm to compute the overlap (supported by MPIAIJ matrix)
7174 
7175    Level: developer
7176 
7177 .seealso: `MatCreateSubMatrices()`
7178 @*/
7179 PetscErrorCode MatIncreaseOverlapSplit(Mat mat,PetscInt n,IS is[],PetscInt ov)
7180 {
7181   PetscInt       i;
7182 
7183   PetscFunctionBegin;
7184   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7185   PetscValidType(mat,1);
7186   PetscCheck(n >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must have one or more domains, you have %" PetscInt_FMT,n);
7187   if (n) {
7188     PetscValidPointer(is,3);
7189     PetscValidHeaderSpecific(*is,IS_CLASSID,3);
7190   }
7191   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
7192   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
7193   MatCheckPreallocated(mat,1);
7194   if (!ov) PetscFunctionReturn(0);
7195   PetscCall(PetscLogEventBegin(MAT_IncreaseOverlap,mat,0,0,0));
7196   for (i=0; i<n; i++) {
7197     PetscCall(MatIncreaseOverlapSplit_Single(mat,&is[i],ov));
7198   }
7199   PetscCall(PetscLogEventEnd(MAT_IncreaseOverlap,mat,0,0,0));
7200   PetscFunctionReturn(0);
7201 }
7202 
7203 /*@
7204    MatGetBlockSize - Returns the matrix block size.
7205 
7206    Not Collective
7207 
7208    Input Parameter:
7209 .  mat - the matrix
7210 
7211    Output Parameter:
7212 .  bs - block size
7213 
7214    Notes:
7215     Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7216 
7217    If the block size has not been set yet this routine returns 1.
7218 
7219    Level: intermediate
7220 
7221 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSizes()`
7222 @*/
7223 PetscErrorCode MatGetBlockSize(Mat mat,PetscInt *bs)
7224 {
7225   PetscFunctionBegin;
7226   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7227   PetscValidIntPointer(bs,2);
7228   *bs = PetscAbs(mat->rmap->bs);
7229   PetscFunctionReturn(0);
7230 }
7231 
7232 /*@
7233    MatGetBlockSizes - Returns the matrix block row and column sizes.
7234 
7235    Not Collective
7236 
7237    Input Parameter:
7238 .  mat - the matrix
7239 
7240    Output Parameters:
7241 +  rbs - row block size
7242 -  cbs - column block size
7243 
7244    Notes:
7245     Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7246     If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7247 
7248    If a block size has not been set yet this routine returns 1.
7249 
7250    Level: intermediate
7251 
7252 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatSetBlockSizes()`
7253 @*/
7254 PetscErrorCode MatGetBlockSizes(Mat mat,PetscInt *rbs, PetscInt *cbs)
7255 {
7256   PetscFunctionBegin;
7257   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7258   if (rbs) PetscValidIntPointer(rbs,2);
7259   if (cbs) PetscValidIntPointer(cbs,3);
7260   if (rbs) *rbs = PetscAbs(mat->rmap->bs);
7261   if (cbs) *cbs = PetscAbs(mat->cmap->bs);
7262   PetscFunctionReturn(0);
7263 }
7264 
7265 /*@
7266    MatSetBlockSize - Sets the matrix block size.
7267 
7268    Logically Collective on Mat
7269 
7270    Input Parameters:
7271 +  mat - the matrix
7272 -  bs - block size
7273 
7274    Notes:
7275     Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7276     This must be called before MatSetUp() or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7277 
7278     For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block size
7279     is compatible with the matrix local sizes.
7280 
7281    Level: intermediate
7282 
7283 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`
7284 @*/
7285 PetscErrorCode MatSetBlockSize(Mat mat,PetscInt bs)
7286 {
7287   PetscFunctionBegin;
7288   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7289   PetscValidLogicalCollectiveInt(mat,bs,2);
7290   PetscCall(MatSetBlockSizes(mat,bs,bs));
7291   PetscFunctionReturn(0);
7292 }
7293 
/* Variable-block envelope information computed by MatComputeVariableBlockEnvelope() below */
typedef struct {
  PetscInt         n;            /* number of entries in is[] (loop bound in EnvelopeDataDestroy()) */
  IS               *is;          /* per-block index sets; destroyed and freed by EnvelopeDataDestroy() */
  Mat              *mat;         /* NOTE(review): not freed by EnvelopeDataDestroy() -- confirm ownership lies elsewhere */
  PetscObjectState nonzerostate; /* presumably the matrix nonzero state when the envelope was computed -- verify against usage */
  Mat              C;
} EnvelopeData;
7301 
7302 static PetscErrorCode EnvelopeDataDestroy(EnvelopeData *edata)
7303 {
7304   for (PetscInt i=0; i<edata->n; i++) {
7305     PetscCall(ISDestroy(&edata->is[i]));
7306   }
7307   PetscCall(PetscFree(edata->is));
7308   PetscCall(PetscFree(edata));
7309   return 0;
7310 }
7311 
7312 /*
7313    MatComputeVariableBlockEnvelope - Given a matrix whose nonzeros are in blocks along the diagonal this computes and stores
7314          the sizes of these blocks in the matrix. An individual block may lie over several processes.
7315 
7316    Collective on mat
7317 
7318    Input Parameter:
7319 .  mat - the matrix
7320 
7321    Notes:
7322      There can be zeros within the blocks
7323 
7324      The blocks can overlap between processes, including laying on more than two processes
7325 
7326 */
static PetscErrorCode MatComputeVariableBlockEnvelope(Mat mat)
{
  PetscInt                    n,*sizes,*starts,i = 0,env = 0, tbs = 0, lblocks = 0,rstart,II,ln = 0,cnt = 0,cstart,cend;
  PetscInt                    *diag,*odiag,sc;
  VecScatter                  scatter;
  PetscScalar                 *seqv;
  const PetscScalar           *parv;
  const PetscInt              *ia,*ja;
  PetscBool                   set,flag,done;
  Mat                         AA = mat,A;
  MPI_Comm                    comm;
  PetscMPIInt                 rank,size,tag;
  MPI_Status                  status;
  PetscContainer              container;
  EnvelopeData                *edata;
  Vec                         seq,par;
  IS                          isglobal;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  /* work with a structurally symmetric matrix so the envelope sweep below sees both A and A^T entries */
  PetscCall(MatIsSymmetricKnown(mat,&set,&flag));
  if (!set || !flag) {
    /* TODO: only needs nonzero structure of transpose */
    PetscCall(MatTranspose(mat,MAT_INITIAL_MATRIX,&AA));
    PetscCall(MatAXPY(AA,1.0,mat,DIFFERENT_NONZERO_PATTERN));
  }
  PetscCall(MatAIJGetLocalMat(AA,&A));
  PetscCall(MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done));
  PetscCheck(done,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Unable to get IJ structure from matrix");

  PetscCall(MatGetLocalSize(mat,&n,NULL));
  PetscCall(PetscObjectGetNewTag((PetscObject)mat,&tag));
  PetscCall(PetscObjectGetComm((PetscObject)mat,&comm));
  PetscCallMPI(MPI_Comm_size(comm,&size));
  PetscCallMPI(MPI_Comm_rank(comm,&rank));

  PetscCall(PetscMalloc2(n,&sizes,n,&starts));

  /* pipeline: each rank receives the running envelope extent (env) and the start of the
     current, possibly unfinished, block (tbs) from the previous rank, because a block may
     straddle rank boundaries */
  if (rank > 0) {
    PetscCallMPI(MPI_Recv(&env,1,MPIU_INT,rank-1,tag,comm,&status));
    PetscCallMPI(MPI_Recv(&tbs,1,MPIU_INT,rank-1,tag,comm,&status));
  }
  PetscCall(MatGetOwnershipRange(mat,&rstart,NULL));
  for (i=0; i<n; i++) {
    /* ja[ia[i+1]-1] is the last (largest) column index of row i; env tracks the furthest column reached so far */
    env = PetscMax(env,ja[ia[i+1]-1]);
    II = rstart + i;
    if (env == II) {
      /* envelope closed exactly at the diagonal: rows tbs..II form one complete block */
      starts[lblocks]  = tbs;
      sizes[lblocks++] = 1 + II - tbs;
      tbs = 1 + II;
    }
  }
  /* forward the pipeline state to the next rank */
  if (rank < size-1) {
    PetscCallMPI(MPI_Send(&env,1,MPIU_INT,rank+1,tag,comm));
    PetscCallMPI(MPI_Send(&tbs,1,MPIU_INT,rank+1,tag,comm));
  }

  PetscCall(MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done));
  if (!set || !flag) {
    PetscCall(MatDestroy(&AA));
  }
  PetscCall(MatDestroy(&A));

  PetscCall(PetscNew(&edata));
  PetscCall(MatGetNonzeroState(mat,&edata->nonzerostate));
  edata->n = lblocks;
  /* create IS needed for extracting blocks from the original matrix */
  PetscCall(PetscMalloc1(lblocks,&edata->is));
  for (PetscInt i=0; i<lblocks; i++) {
    PetscCall(ISCreateStride(PETSC_COMM_SELF,sizes[i],starts[i],1,&edata->is[i]));
  }

  /* Create the resulting inverse matrix structure with preallocation information */
  PetscCall(MatCreate(PetscObjectComm((PetscObject)mat),&edata->C));
  PetscCall(MatSetSizes(edata->C,mat->rmap->n,mat->cmap->n,mat->rmap->N,mat->cmap->N));
  PetscCall(MatSetBlockSizesFromMats(edata->C,mat,mat));
  PetscCall(MatSetType(edata->C,MATAIJ));

  /* Communicate the start and end of each row, from each block to the correct rank */
  /* TODO: Use PetscSF instead of VecScatter */
  for (PetscInt i=0; i<lblocks; i++) ln += sizes[i];
  PetscCall(VecCreateSeq(PETSC_COMM_SELF,2*ln,&seq));
  PetscCall(VecGetArrayWrite(seq,&seqv));
  /* pack one (block start, block end) pair per row owned by a block on this rank */
  for (PetscInt i=0; i<lblocks; i++) {
    for (PetscInt j=0; j<sizes[i]; j++) {
      seqv[cnt]   = starts[i];
      seqv[cnt+1] = starts[i] + sizes[i];
      cnt += 2;
    }
  }
  PetscCall(VecRestoreArrayWrite(seq,&seqv));
  /* sc = global offset of this rank's pairs in the parallel vector (exclusive prefix sum of cnt) */
  PetscCallMPI(MPI_Scan(&cnt,&sc,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)mat)));
  sc -= cnt;
  PetscCall(VecCreateMPI(PetscObjectComm((PetscObject)mat),2*mat->rmap->n,2*mat->rmap->N,&par));
  PetscCall(ISCreateStride(PETSC_COMM_SELF,cnt,sc,1,&isglobal));
  PetscCall(VecScatterCreate(seq, NULL  ,par, isglobal,&scatter));
  PetscCall(ISDestroy(&isglobal));
  PetscCall(VecScatterBegin(scatter,seq,par,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(scatter,seq,par,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterDestroy(&scatter));
  PetscCall(VecDestroy(&seq));
  /* for each local row, split the number of nonzeros of its block row [start,end) into
     diagonal (columns in [cstart,cend)) and off-diagonal counts for preallocation */
  PetscCall(MatGetOwnershipRangeColumn(mat,&cstart,&cend));
  PetscCall(PetscMalloc2(mat->rmap->n,&diag,mat->rmap->n,&odiag));
  PetscCall(VecGetArrayRead(par,&parv));
  cnt = 0;
  PetscCall(MatGetSize(mat,NULL,&n));
  for (PetscInt i=0; i<mat->rmap->n; i++) {
    PetscInt start,end,d = 0,od = 0;

    start = (PetscInt)PetscRealPart(parv[cnt]);
    end   = (PetscInt)PetscRealPart(parv[cnt+1]);
    cnt  += 2;

    /* inclusion-exclusion: add counts as if the row ran from start to the global end,
       then subtract the part from end onward */
    if (start < cstart) {od += cstart - start + n - cend; d += cend - cstart;}
    else if (start < cend) {od += n - cend; d += cend - start;}
    else od += n - start;
    if (end <= cstart) {od -= cstart - end + n - cend; d -= cend - cstart;}
    else if (end < cend) {od -= n - cend; d -= cend - end;}
    else od -= n - end;

    odiag[i] = od;
    diag[i]  = d;
  }
  PetscCall(VecRestoreArrayRead(par,&parv));
  PetscCall(VecDestroy(&par));
  PetscCall(MatXAIJSetPreallocation(edata->C,mat->rmap->bs,diag,odiag,NULL,NULL));
  PetscCall(PetscFree2(diag,odiag));
  PetscCall(PetscFree2(sizes,starts));

  /* stash the envelope on the matrix so MatInvertVariableBlockEnvelope() can reuse it */
  PetscCall(PetscContainerCreate(PETSC_COMM_SELF,&container));
  PetscCall(PetscContainerSetPointer(container,edata));
  PetscCall(PetscContainerSetUserDestroy(container,(PetscErrorCode (*)(void*))EnvelopeDataDestroy));
  PetscCall(PetscObjectCompose((PetscObject)mat,"EnvelopeData",(PetscObject)container));
  PetscCall(PetscObjectDereference((PetscObject)container));
  PetscFunctionReturn(0);
}
7463 
7464 /*@
7465   MatInvertVariableBlockEnvelope - set matrix C to be the inverted block diagonal of matrix A
7466 
7467   Collective on Mat
7468 
  Input Parameters:
+ A - the matrix
- reuse - MAT_INITIAL_MATRIX to create C, or MAT_REUSE_MATRIX to reuse the C returned by a previous call

  Output Parameter:
. C - matrix with inverted block diagonal of A.  This matrix should be created and may have its type set.
7474 
7475   Notes:
7476      For efficiency the matrix A should have all the nonzero entries clustered in smallish blocks along the diagonal.
7477 
7478   Level: advanced
7479 
7480 .seealso: MatInvertBlockDiagonal(), MatComputeBlockDiagonal()
7481 @*/
PetscErrorCode MatInvertVariableBlockEnvelope(Mat A,MatReuse reuse, Mat *C)
{
  PetscContainer    container;
  EnvelopeData      *edata;
  PetscObjectState  nonzerostate;

  PetscFunctionBegin;
  /* compute the block envelope on first use; it is cached on A in an "EnvelopeData" container */
  PetscCall(PetscObjectQuery((PetscObject)A,"EnvelopeData",(PetscObject*)&container));
  if (!container) {
    PetscCall(MatComputeVariableBlockEnvelope(A));
    PetscCall(PetscObjectQuery((PetscObject)A,"EnvelopeData",(PetscObject*)&container));
  }
  PetscCall(PetscContainerGetPointer(container,(void**)&edata));
  /* the cached envelope is only valid while A keeps the nonzero structure it had when computed */
  PetscCall(MatGetNonzeroState(A,&nonzerostate));
  PetscCheck(nonzerostate <= edata->nonzerostate,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot handle changes to matrix nonzero structure");
  PetscCheck(reuse != MAT_REUSE_MATRIX || *C == edata->C,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"C matrix must be the same as previously output");

  /* extract each diagonal block of A as a sequential matrix */
  PetscCall(MatCreateSubMatrices(A,edata->n,edata->is,edata->is,MAT_INITIAL_MATRIX,&edata->mat));
  *C   = edata->C;

  /* invert each block densely and insert the inverse into the corresponding rows/columns of C */
  for (PetscInt i=0; i<edata->n; i++) {
    Mat         D;
    PetscScalar *dvalues;

    PetscCall(MatConvert(edata->mat[i], MATSEQDENSE,MAT_INITIAL_MATRIX,&D));
    /* the dense array is column oriented, so tell C to interpret the values that way */
    PetscCall(MatSetOption(*C,MAT_ROW_ORIENTED,PETSC_FALSE));
    PetscCall(MatSeqDenseInvert(D));
    PetscCall(MatDenseGetArray(D,&dvalues));
    PetscCall(MatSetValuesIS(*C,edata->is[i],edata->is[i],dvalues,INSERT_VALUES));
    PetscCall(MatDestroy(&D));
  }
  PetscCall(MatDestroySubMatrices(edata->n,&edata->mat));
  PetscCall(MatAssemblyBegin(*C,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(*C,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
7518 
7519 /*@
7520    MatSetVariableBlockSizes - Sets diagonal point-blocks of the matrix that need not be of the same size
7521 
7522    Logically Collective on Mat
7523 
7524    Input Parameters:
7525 +  mat - the matrix
7526 .  nblocks - the number of blocks on this process, each block can only exist on a single process
7527 -  bsizes - the block sizes
7528 
7529    Notes:
7530     Currently used by PCVPBJACOBI for AIJ matrices
7531 
7532     Each variable point-block set of degrees of freedom must live on a single MPI rank. That is a point block cannot straddle two MPI ranks.
7533 
7534    Level: intermediate
7535 
7536 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatGetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`, `PCVPBJACOBI`
7537 @*/
7538 PetscErrorCode MatSetVariableBlockSizes(Mat mat,PetscInt nblocks,PetscInt *bsizes)
7539 {
7540   PetscInt       i,ncnt = 0, nlocal;
7541 
7542   PetscFunctionBegin;
7543   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7544   PetscCheck(nblocks >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Number of local blocks must be great than or equal to zero");
7545   PetscCall(MatGetLocalSize(mat,&nlocal,NULL));
7546   for (i=0; i<nblocks; i++) ncnt += bsizes[i];
7547   PetscCheck(ncnt == nlocal,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local block sizes %" PetscInt_FMT " does not equal local size of matrix %" PetscInt_FMT,ncnt,nlocal);
7548   PetscCall(PetscFree(mat->bsizes));
7549   mat->nblocks = nblocks;
7550   PetscCall(PetscMalloc1(nblocks,&mat->bsizes));
7551   PetscCall(PetscArraycpy(mat->bsizes,bsizes,nblocks));
7552   PetscFunctionReturn(0);
7553 }
7554 
7555 /*@C
   MatGetVariableBlockSizes - Gets the diagonal point-blocks of the matrix, which need not be of the same size
7557 
7558    Logically Collective on Mat
7559 
7560    Input Parameter:
7561 .  mat - the matrix
7562 
7563    Output Parameters:
7564 +  nblocks - the number of blocks on this process
7565 -  bsizes - the block sizes
7566 
7567    Notes: Currently not supported from Fortran
7568 
7569    Level: intermediate
7570 
7571 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`, `MatGetBlockSizes()`, `MatSetVariableBlockSizes()`, `MatComputeVariableBlockEnvelope()`
7572 @*/
7573 PetscErrorCode MatGetVariableBlockSizes(Mat mat,PetscInt *nblocks,const PetscInt **bsizes)
7574 {
7575   PetscFunctionBegin;
7576   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7577   *nblocks = mat->nblocks;
7578   *bsizes  = mat->bsizes;
7579   PetscFunctionReturn(0);
7580 }
7581 
7582 /*@
7583    MatSetBlockSizes - Sets the matrix block row and column sizes.
7584 
7585    Logically Collective on Mat
7586 
7587    Input Parameters:
7588 +  mat - the matrix
7589 .  rbs - row block size
7590 -  cbs - column block size
7591 
7592    Notes:
7593     Block row formats are MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ. These formats ALWAYS have square block storage in the matrix.
7594     If you pass a different block size for the columns than the rows, the row block size determines the square block storage.
7595     This must be called before MatSetUp() or MatXXXSetPreallocation() (or will default to 1) and the block size cannot be changed later.
7596 
7597     For MATMPIAIJ and MATSEQAIJ matrix formats, this function can be called at a later stage, provided that the specified block sizes
7598     are compatible with the matrix local sizes.
7599 
7600     The row and column block size determine the blocksize of the "row" and "column" vectors returned by MatCreateVecs().
7601 
7602    Level: intermediate
7603 
7604 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSize()`, `MatGetBlockSizes()`
7605 @*/
PetscErrorCode MatSetBlockSizes(Mat mat,PetscInt rbs,PetscInt cbs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidLogicalCollectiveInt(mat,rbs,2);
  PetscValidLogicalCollectiveInt(mat,cbs,3);
  /* give the implementation a chance to validate/record the block sizes first */
  if (mat->ops->setblocksizes) {
    PetscCall((*mat->ops->setblocksizes)(mat,rbs,cbs));
  }
  /* if the row layout is shared with other objects (refcnt > 0), replace it with a private
     duplicate so changing the block size here does not affect those other objects */
  if (mat->rmap->refcnt) {
    ISLocalToGlobalMapping l2g = NULL;
    PetscLayout            nmap = NULL;

    PetscCall(PetscLayoutDuplicate(mat->rmap,&nmap));
    if (mat->rmap->mapping) {
      PetscCall(ISLocalToGlobalMappingDuplicate(mat->rmap->mapping,&l2g));
    }
    PetscCall(PetscLayoutDestroy(&mat->rmap));
    mat->rmap = nmap;
    mat->rmap->mapping = l2g;
  }
  /* same copy-on-write treatment for the column layout */
  if (mat->cmap->refcnt) {
    ISLocalToGlobalMapping l2g = NULL;
    PetscLayout            nmap = NULL;

    PetscCall(PetscLayoutDuplicate(mat->cmap,&nmap));
    if (mat->cmap->mapping) {
      PetscCall(ISLocalToGlobalMappingDuplicate(mat->cmap->mapping,&l2g));
    }
    PetscCall(PetscLayoutDestroy(&mat->cmap));
    mat->cmap = nmap;
    mat->cmap->mapping = l2g;
  }
  /* finally set the block sizes on the (now privately owned) layouts */
  PetscCall(PetscLayoutSetBlockSize(mat->rmap,rbs));
  PetscCall(PetscLayoutSetBlockSize(mat->cmap,cbs));
  PetscFunctionReturn(0);
}
7643 
7644 /*@
7645    MatSetBlockSizesFromMats - Sets the matrix block row and column sizes to match a pair of matrices
7646 
7647    Logically Collective on Mat
7648 
7649    Input Parameters:
7650 +  mat - the matrix
7651 .  fromRow - matrix from which to copy row block size
7652 -  fromCol - matrix from which to copy column block size (can be same as fromRow)
7653 
7654    Level: developer
7655 
7656 .seealso: `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, `MatGetBlockSize()`, `MatSetBlockSizes()`
7657 @*/
PetscErrorCode MatSetBlockSizesFromMats(Mat mat,Mat fromRow,Mat fromCol)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(fromRow,MAT_CLASSID,2);
  PetscValidHeaderSpecific(fromCol,MAT_CLASSID,3);
  /* only copy block sizes that have actually been set (bs > 0); otherwise leave mat's layouts untouched */
  if (fromRow->rmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->rmap,fromRow->rmap->bs));
  if (fromCol->cmap->bs > 0) PetscCall(PetscLayoutSetBlockSize(mat->cmap,fromCol->cmap->bs));
  PetscFunctionReturn(0);
}
7668 
7669 /*@
7670    MatResidual - Default routine to calculate the residual.
7671 
7672    Collective on Mat
7673 
7674    Input Parameters:
7675 +  mat - the matrix
7676 .  b   - the right-hand-side
7677 -  x   - the approximate solution
7678 
7679    Output Parameter:
7680 .  r - location to store the residual
7681 
7682    Level: developer
7683 
7684 .seealso: `PCMGSetResidual()`
7685 @*/
7686 PetscErrorCode MatResidual(Mat mat,Vec b,Vec x,Vec r)
7687 {
7688   PetscFunctionBegin;
7689   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7690   PetscValidHeaderSpecific(b,VEC_CLASSID,2);
7691   PetscValidHeaderSpecific(x,VEC_CLASSID,3);
7692   PetscValidHeaderSpecific(r,VEC_CLASSID,4);
7693   PetscValidType(mat,1);
7694   MatCheckPreallocated(mat,1);
7695   PetscCall(PetscLogEventBegin(MAT_Residual,mat,0,0,0));
7696   if (!mat->ops->residual) {
7697     PetscCall(MatMult(mat,x,r));
7698     PetscCall(VecAYPX(r,-1.0,b));
7699   } else {
7700     PetscCall((*mat->ops->residual)(mat,b,x,r));
7701   }
7702   PetscCall(PetscLogEventEnd(MAT_Residual,mat,0,0,0));
7703   PetscFunctionReturn(0);
7704 }
7705 
7706 /*@C
7707     MatGetRowIJ - Returns the compressed row storage i and j indices for sequential matrices.
7708 
7709    Collective on Mat
7710 
7711     Input Parameters:
7712 +   mat - the matrix
7713 .   shift -  0 or 1 indicating we want the indices starting at 0 or 1
7714 .   symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be   symmetrized
7715 -   inodecompressed - PETSC_TRUE or PETSC_FALSE  indicating if the nonzero structure of the
7716                  inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7717                  always used.
7718 
7719     Output Parameters:
7720 +   n - number of rows in the (possibly compressed) matrix
7721 .   ia - the row pointers; that is ia[0] = 0, ia[row] = ia[row-1] + number of elements in that row of the matrix
7722 .   ja - the column indices
7723 -   done - indicates if the routine actually worked and returned appropriate ia[] and ja[] arrays; callers
7724            are responsible for handling the case when done == PETSC_FALSE and ia and ja are not set
7725 
7726     Level: developer
7727 
7728     Notes:
7729     You CANNOT change any of the ia[] or ja[] values.
7730 
7731     Use MatRestoreRowIJ() when you are finished accessing the ia[] and ja[] values.
7732 
7733     Fortran Notes:
7734     In Fortran use
7735 $
7736 $      PetscInt ia(1), ja(1)
7737 $      PetscOffset iia, jja
7738 $      call MatGetRowIJ(mat,shift,symmetric,inodecompressed,n,ia,iia,ja,jja,done,ierr)
7739 $      ! Access the ith and jth entries via ia(iia + i) and ja(jja + j)
7740 
7741      or
7742 $
7743 $    PetscInt, pointer :: ia(:),ja(:)
7744 $    call MatGetRowIJF90(mat,shift,symmetric,inodecompressed,n,ia,ja,done,ierr)
7745 $    ! Access the ith and jth entries via ia(i) and ja(j)
7746 
7747 .seealso: `MatGetColumnIJ()`, `MatRestoreRowIJ()`, `MatSeqAIJGetArray()`
7748 @*/
7749 PetscErrorCode MatGetRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool  *done)
7750 {
7751   PetscFunctionBegin;
7752   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7753   PetscValidType(mat,1);
7754   PetscValidIntPointer(n,5);
7755   if (ia) PetscValidPointer(ia,6);
7756   if (ja) PetscValidPointer(ja,7);
7757   PetscValidBoolPointer(done,8);
7758   MatCheckPreallocated(mat,1);
7759   if (!mat->ops->getrowij) *done = PETSC_FALSE;
7760   else {
7761     *done = PETSC_TRUE;
7762     PetscCall(PetscLogEventBegin(MAT_GetRowIJ,mat,0,0,0));
7763     PetscCall((*mat->ops->getrowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done));
7764     PetscCall(PetscLogEventEnd(MAT_GetRowIJ,mat,0,0,0));
7765   }
7766   PetscFunctionReturn(0);
7767 }
7768 
7769 /*@C
7770     MatGetColumnIJ - Returns the compressed column storage i and j indices for sequential matrices.
7771 
7772     Collective on Mat
7773 
    Input Parameters:
+   mat - the matrix
.   shift - 0 or 1 indicating we want the indices starting at 0 or 1
.   symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
                symmetrized
-   inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
                 inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
                 always used.

    Output Parameters:
+   n - number of columns in the (possibly compressed) matrix
.   ia - the column pointers; that is ia[0] = 0, ia[col] = ia[col-1] + number of elements in that col of the matrix
.   ja - the row indices
-   done - PETSC_TRUE or PETSC_FALSE, indicating whether the values have been returned
7788 
7789     Level: developer
7790 
7791 .seealso: `MatGetRowIJ()`, `MatRestoreColumnIJ()`
7792 @*/
7793 PetscErrorCode MatGetColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool  *done)
7794 {
7795   PetscFunctionBegin;
7796   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7797   PetscValidType(mat,1);
7798   PetscValidIntPointer(n,5);
7799   if (ia) PetscValidPointer(ia,6);
7800   if (ja) PetscValidPointer(ja,7);
7801   PetscValidBoolPointer(done,8);
7802   MatCheckPreallocated(mat,1);
7803   if (!mat->ops->getcolumnij) *done = PETSC_FALSE;
7804   else {
7805     *done = PETSC_TRUE;
7806     PetscCall((*mat->ops->getcolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done));
7807   }
7808   PetscFunctionReturn(0);
7809 }
7810 
7811 /*@C
7812     MatRestoreRowIJ - Call after you are completed with the ia,ja indices obtained with
7813     MatGetRowIJ().
7814 
7815     Collective on Mat
7816 
7817     Input Parameters:
7818 +   mat - the matrix
7819 .   shift - 1 or zero indicating we want the indices starting at 0 or 1
7820 .   symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
7821                 symmetrized
7822 .   inodecompressed -  PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7823                  inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7824                  always used.
7825 .   n - size of (possibly compressed) matrix
7826 .   ia - the row pointers
7827 -   ja - the column indices
7828 
7829     Output Parameters:
7830 .   done - PETSC_TRUE or PETSC_FALSE indicated that the values have been returned
7831 
7832     Note:
7833     This routine zeros out n, ia, and ja. This is to prevent accidental
    use of the array after it has been restored. If you pass NULL, it will
7835     not zero the pointers.  Use of ia or ja after MatRestoreRowIJ() is invalid.
7836 
7837     Level: developer
7838 
7839 .seealso: `MatGetRowIJ()`, `MatRestoreColumnIJ()`
7840 @*/
7841 PetscErrorCode MatRestoreRowIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool  *done)
7842 {
7843   PetscFunctionBegin;
7844   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7845   PetscValidType(mat,1);
7846   if (ia) PetscValidPointer(ia,6);
7847   if (ja) PetscValidPointer(ja,7);
7848   PetscValidBoolPointer(done,8);
7849   MatCheckPreallocated(mat,1);
7850 
7851   if (!mat->ops->restorerowij) *done = PETSC_FALSE;
7852   else {
7853     *done = PETSC_TRUE;
7854     PetscCall((*mat->ops->restorerowij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done));
7855     if (n)  *n = 0;
7856     if (ia) *ia = NULL;
7857     if (ja) *ja = NULL;
7858   }
7859   PetscFunctionReturn(0);
7860 }
7861 
7862 /*@C
7863     MatRestoreColumnIJ - Call after you are completed with the ia,ja indices obtained with
7864     MatGetColumnIJ().
7865 
7866     Collective on Mat
7867 
7868     Input Parameters:
7869 +   mat - the matrix
7870 .   shift - 1 or zero indicating we want the indices starting at 0 or 1
7871 .   symmetric - PETSC_TRUE or PETSC_FALSE indicating the matrix data structure should be
7872                 symmetrized
7873 -   inodecompressed - PETSC_TRUE or PETSC_FALSE indicating if the nonzero structure of the
7874                  inodes or the nonzero elements is wanted. For BAIJ matrices the compressed version is
7875                  always used.
7876 
7877     Output Parameters:
7878 +   n - size of (possibly compressed) matrix
7879 .   ia - the column pointers
7880 .   ja - the row indices
7881 -   done - PETSC_TRUE or PETSC_FALSE indicated that the values have been returned
7882 
7883     Level: developer
7884 
7885 .seealso: `MatGetColumnIJ()`, `MatRestoreRowIJ()`
7886 @*/
7887 PetscErrorCode MatRestoreColumnIJ(Mat mat,PetscInt shift,PetscBool symmetric,PetscBool inodecompressed,PetscInt *n,const PetscInt *ia[],const PetscInt *ja[],PetscBool  *done)
7888 {
7889   PetscFunctionBegin;
7890   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7891   PetscValidType(mat,1);
7892   if (ia) PetscValidPointer(ia,6);
7893   if (ja) PetscValidPointer(ja,7);
7894   PetscValidBoolPointer(done,8);
7895   MatCheckPreallocated(mat,1);
7896 
7897   if (!mat->ops->restorecolumnij) *done = PETSC_FALSE;
7898   else {
7899     *done = PETSC_TRUE;
7900     PetscCall((*mat->ops->restorecolumnij)(mat,shift,symmetric,inodecompressed,n,ia,ja,done));
7901     if (n)  *n = 0;
7902     if (ia) *ia = NULL;
7903     if (ja) *ja = NULL;
7904   }
7905   PetscFunctionReturn(0);
7906 }
7907 
7908 /*@C
7909     MatColoringPatch -Used inside matrix coloring routines that
7910     use MatGetRowIJ() and/or MatGetColumnIJ().
7911 
7912     Collective on Mat
7913 
7914     Input Parameters:
7915 +   mat - the matrix
7916 .   ncolors - max color value
7917 .   n   - number of entries in colorarray
7918 -   colorarray - array indicating color for each column
7919 
7920     Output Parameters:
7921 .   iscoloring - coloring generated using colorarray information
7922 
7923     Level: developer
7924 
7925 .seealso: `MatGetRowIJ()`, `MatGetColumnIJ()`
7926 
7927 @*/
7928 PetscErrorCode MatColoringPatch(Mat mat,PetscInt ncolors,PetscInt n,ISColoringValue colorarray[],ISColoring *iscoloring)
7929 {
7930   PetscFunctionBegin;
7931   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7932   PetscValidType(mat,1);
7933   PetscValidIntPointer(colorarray,4);
7934   PetscValidPointer(iscoloring,5);
7935   MatCheckPreallocated(mat,1);
7936 
7937   if (!mat->ops->coloringpatch) {
7938     PetscCall(ISColoringCreate(PetscObjectComm((PetscObject)mat),ncolors,n,colorarray,PETSC_OWN_POINTER,iscoloring));
7939   } else {
7940     PetscCall((*mat->ops->coloringpatch)(mat,ncolors,n,colorarray,iscoloring));
7941   }
7942   PetscFunctionReturn(0);
7943 }
7944 
7945 /*@
7946    MatSetUnfactored - Resets a factored matrix to be treated as unfactored.
7947 
7948    Logically Collective on Mat
7949 
7950    Input Parameter:
7951 .  mat - the factored matrix to be reset
7952 
7953    Notes:
7954    This routine should be used only with factored matrices formed by in-place
7955    factorization via ILU(0) (or by in-place LU factorization for the MATSEQDENSE
7956    format).  This option can save memory, for example, when solving nonlinear
7957    systems with a matrix-free Newton-Krylov method and a matrix-based, in-place
7958    ILU(0) preconditioner.
7959 
7960    Note that one can specify in-place ILU(0) factorization by calling
7961 .vb
     PCSetType(pc,PCILU);
     PCFactorSetUseInPlace(pc,PETSC_TRUE);
7964 .ve
7965    or by using the options -pc_type ilu -pc_factor_in_place
7966 
7967    In-place factorization ILU(0) can also be used as a local
7968    solver for the blocks within the block Jacobi or additive Schwarz
7969    methods (runtime option: -sub_pc_factor_in_place).  See Users-Manual: ch_pc
7970    for details on setting local solver options.
7971 
7972    Most users should employ the simplified KSP interface for linear solvers
7973    instead of working directly with matrix algebra routines such as this.
7974    See, e.g., KSPCreate().
7975 
7976    Level: developer
7977 
7978 .seealso: `PCFactorSetUseInPlace()`, `PCFactorGetUseInPlace()`
7979 
7980 @*/
7981 PetscErrorCode MatSetUnfactored(Mat mat)
7982 {
7983   PetscFunctionBegin;
7984   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
7985   PetscValidType(mat,1);
7986   MatCheckPreallocated(mat,1);
7987   mat->factortype = MAT_FACTOR_NONE;
7988   if (!mat->ops->setunfactored) PetscFunctionReturn(0);
7989   PetscCall((*mat->ops->setunfactored)(mat));
7990   PetscFunctionReturn(0);
7991 }
7992 
7993 /*MC
7994     MatDenseGetArrayF90 - Accesses a matrix array from Fortran90.
7995 
7996     Synopsis:
7997     MatDenseGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
7998 
7999     Not collective
8000 
8001     Input Parameter:
8002 .   x - matrix
8003 
8004     Output Parameters:
8005 +   xx_v - the Fortran90 pointer to the array
8006 -   ierr - error code
8007 
8008     Example of Usage:
8009 .vb
8010       PetscScalar, pointer xx_v(:,:)
8011       ....
8012       call MatDenseGetArrayF90(x,xx_v,ierr)
8013       a = xx_v(3)
8014       call MatDenseRestoreArrayF90(x,xx_v,ierr)
8015 .ve
8016 
8017     Level: advanced
8018 
8019 .seealso: `MatDenseRestoreArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJGetArrayF90()`
8020 
8021 M*/
8022 
8023 /*MC
8024     MatDenseRestoreArrayF90 - Restores a matrix array that has been
8025     accessed with MatDenseGetArrayF90().
8026 
8027     Synopsis:
8028     MatDenseRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:,:)},integer ierr)
8029 
8030     Not collective
8031 
8032     Input Parameters:
8033 +   x - matrix
8034 -   xx_v - the Fortran90 pointer to the array
8035 
8036     Output Parameter:
8037 .   ierr - error code
8038 
8039     Example of Usage:
8040 .vb
8041        PetscScalar, pointer xx_v(:,:)
8042        ....
8043        call MatDenseGetArrayF90(x,xx_v,ierr)
8044        a = xx_v(3)
8045        call MatDenseRestoreArrayF90(x,xx_v,ierr)
8046 .ve
8047 
8048     Level: advanced
8049 
8050 .seealso: `MatDenseGetArrayF90()`, `MatDenseGetArray()`, `MatDenseRestoreArray()`, `MatSeqAIJRestoreArrayF90()`
8051 
8052 M*/
8053 
8054 /*MC
8055     MatSeqAIJGetArrayF90 - Accesses a matrix array from Fortran90.
8056 
8057     Synopsis:
8058     MatSeqAIJGetArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8059 
8060     Not collective
8061 
8062     Input Parameter:
8063 .   x - matrix
8064 
8065     Output Parameters:
8066 +   xx_v - the Fortran90 pointer to the array
8067 -   ierr - error code
8068 
8069     Example of Usage:
8070 .vb
8071       PetscScalar, pointer xx_v(:)
8072       ....
8073       call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8074       a = xx_v(3)
8075       call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8076 .ve
8077 
8078     Level: advanced
8079 
8080 .seealso: `MatSeqAIJRestoreArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseGetArrayF90()`
8081 
8082 M*/
8083 
8084 /*MC
8085     MatSeqAIJRestoreArrayF90 - Restores a matrix array that has been
8086     accessed with MatSeqAIJGetArrayF90().
8087 
8088     Synopsis:
8089     MatSeqAIJRestoreArrayF90(Mat x,{Scalar, pointer :: xx_v(:)},integer ierr)
8090 
8091     Not collective
8092 
8093     Input Parameters:
8094 +   x - matrix
8095 -   xx_v - the Fortran90 pointer to the array
8096 
8097     Output Parameter:
8098 .   ierr - error code
8099 
8100     Example of Usage:
8101 .vb
8102        PetscScalar, pointer xx_v(:)
8103        ....
8104        call MatSeqAIJGetArrayF90(x,xx_v,ierr)
8105        a = xx_v(3)
8106        call MatSeqAIJRestoreArrayF90(x,xx_v,ierr)
8107 .ve
8108 
8109     Level: advanced
8110 
8111 .seealso: `MatSeqAIJGetArrayF90()`, `MatSeqAIJGetArray()`, `MatSeqAIJRestoreArray()`, `MatDenseRestoreArrayF90()`
8112 
8113 M*/
8114 
8115 /*@
8116     MatCreateSubMatrix - Gets a single submatrix on the same number of processors
8117                       as the original matrix.
8118 
8119     Collective on Mat
8120 
8121     Input Parameters:
8122 +   mat - the original matrix
8123 .   isrow - parallel IS containing the rows this processor should obtain
8124 .   iscol - parallel IS containing all columns you wish to keep. Each process should list the columns that will be in IT's "diagonal part" in the new matrix.
8125 -   cll - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
8126 
8127     Output Parameter:
8128 .   newmat - the new submatrix, of the same type as the old
8129 
8130     Level: advanced
8131 
8132     Notes:
8133     The submatrix will be able to be multiplied with vectors using the same layout as iscol.
8134 
8135     Some matrix types place restrictions on the row and column indices, such
8136     as that they be sorted or that they be equal to each other.
8137 
8138     The index sets may not have duplicate entries.
8139 
8140       The first time this is called you should use a cll of MAT_INITIAL_MATRIX,
8141    the MatCreateSubMatrix() routine will create the newmat for you. Any additional calls
8142    to this routine with a mat of the same nonzero structure and with a call of MAT_REUSE_MATRIX
8143    will reuse the matrix generated the first time.  You should call MatDestroy() on newmat when
8144    you are finished using it.
8145 
8146     The communicator of the newly obtained matrix is ALWAYS the same as the communicator of
8147     the input matrix.
8148 
8149     If iscol is NULL then all columns are obtained (not supported in Fortran).
8150 
8151    Example usage:
8152    Consider the following 8x8 matrix with 34 non-zero values, that is
8153    assembled across 3 processors. Let's assume that proc0 owns 3 rows,
8154    proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
8155    as follows:
8156 
8157 .vb
8158             1  2  0  |  0  3  0  |  0  4
8159     Proc0   0  5  6  |  7  0  0  |  8  0
8160             9  0 10  | 11  0  0  | 12  0
8161     -------------------------------------
8162            13  0 14  | 15 16 17  |  0  0
8163     Proc1   0 18  0  | 19 20 21  |  0  0
8164             0  0  0  | 22 23  0  | 24  0
8165     -------------------------------------
8166     Proc2  25 26 27  |  0  0 28  | 29  0
8167            30  0  0  | 31 32 33  |  0 34
8168 .ve
8169 
8170     Suppose isrow = [0 1 | 4 | 6 7] and iscol = [1 2 | 3 4 5 | 6].  The resulting submatrix is
8171 
8172 .vb
8173             2  0  |  0  3  0  |  0
8174     Proc0   5  6  |  7  0  0  |  8
8175     -------------------------------
8176     Proc1  18  0  | 19 20 21  |  0
8177     -------------------------------
8178     Proc2  26 27  |  0  0 28  | 29
8179             0  0  | 31 32 33  |  0
8180 .ve
8181 
8182 .seealso: `MatCreateSubMatrices()`, `MatCreateSubMatricesMPI()`, `MatCreateSubMatrixVirtual()`, `MatSubMatrixVirtualUpdate()`
8183 @*/
PetscErrorCode MatCreateSubMatrix(Mat mat,IS isrow,IS iscol,MatReuse cll,Mat *newmat)
{
  PetscMPIInt    size;
  Mat            *local;
  IS             iscoltmp;
  PetscBool      flg;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
  if (iscol) PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
  PetscValidPointer(newmat,5);
  /* with MAT_REUSE_MATRIX the caller must hand in a previously created submatrix */
  if (cll == MAT_REUSE_MATRIX) PetscValidHeaderSpecific(*newmat,MAT_CLASSID,5);
  PetscValidType(mat,1);
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(cll != MAT_IGNORE_MATRIX,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Cannot use MAT_IGNORE_MATRIX");

  MatCheckPreallocated(mat,1);
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));

  /* Fast path: if on every process isrow is a stride-1 IS covering exactly the locally
     owned rows (and iscol is the same selection or NULL, meaning all columns), the
     requested submatrix is the whole matrix; return a new reference instead of copying */
  if (!iscol || isrow == iscol) {
    PetscBool   stride;
    PetscMPIInt grabentirematrix = 0,grab;
    PetscCall(PetscObjectTypeCompare((PetscObject)isrow,ISSTRIDE,&stride));
    if (stride) {
      PetscInt first,step,n,rstart,rend;
      PetscCall(ISStrideGetInfo(isrow,&first,&step));
      if (step == 1) {
        PetscCall(MatGetOwnershipRange(mat,&rstart,&rend));
        if (rstart == first) {
          PetscCall(ISGetLocalSize(isrow,&n));
          if (n == rend-rstart) {
            grabentirematrix = 1;
          }
        }
      }
    }
    /* all ranks must qualify before taking the fast path; MPI_MIN lets any
       non-qualifying rank (grabentirematrix == 0) veto it */
    PetscCall(MPIU_Allreduce(&grabentirematrix,&grab,1,MPI_INT,MPI_MIN,PetscObjectComm((PetscObject)mat)));
    if (grab) {
      PetscCall(PetscInfo(mat,"Getting entire matrix as submatrix\n"));
      if (cll == MAT_INITIAL_MATRIX) {
        /* hand back mat itself with an extra reference; caller still MatDestroy()s it */
        *newmat = mat;
        PetscCall(PetscObjectReference((PetscObject)mat));
      }
      PetscFunctionReturn(0);
    }
  }

  /* a NULL iscol means "keep all columns": build a temporary stride IS over the local columns */
  if (!iscol) {
    PetscCall(ISCreateStride(PetscObjectComm((PetscObject)mat),mat->cmap->n,mat->cmap->rstart,1,&iscoltmp));
  } else {
    iscoltmp = iscol;
  }

  /* if original matrix is on just one processor then use submatrix generated */
  if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1 && cll == MAT_REUSE_MATRIX) {
    PetscCall(MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_REUSE_MATRIX,&newmat));
    goto setproperties;
  } else if (mat->ops->createsubmatrices && !mat->ops->createsubmatrix && size == 1) {
    /* MatCreateSubMatrices() returns an allocated array of one Mat; unwrap it */
    PetscCall(MatCreateSubMatrices(mat,1,&isrow,&iscoltmp,MAT_INITIAL_MATRIX,&local));
    *newmat = *local;
    PetscCall(PetscFree(local));
    goto setproperties;
  } else if (!mat->ops->createsubmatrix) {
    /* Create a new matrix type that implements the operation using the full matrix */
    PetscCall(PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0));
    switch (cll) {
    case MAT_INITIAL_MATRIX:
      PetscCall(MatCreateSubMatrixVirtual(mat,isrow,iscoltmp,newmat));
      break;
    case MAT_REUSE_MATRIX:
      PetscCall(MatSubMatrixVirtualUpdate(*newmat,mat,isrow,iscoltmp));
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"Invalid MatReuse, must be either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX");
    }
    PetscCall(PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0));
    goto setproperties;
  }

  /* otherwise dispatch to the type-specific (parallel) implementation */
  PetscCheck(mat->ops->createsubmatrix,PETSC_COMM_SELF,PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  PetscCall(PetscLogEventBegin(MAT_CreateSubMat,mat,0,0,0));
  PetscCall((*mat->ops->createsubmatrix)(mat,isrow,iscoltmp,cll,newmat));
  PetscCall(PetscLogEventEnd(MAT_CreateSubMat,mat,0,0,0));

setproperties:
  /* a principal submatrix (same row and column selection) inherits the symmetry options */
  PetscCall(ISEqualUnsorted(isrow,iscoltmp,&flg));
  if (flg) {
    PetscCall(MatPropagateSymmetryOptions(mat,*newmat));
  }
  if (!iscol) PetscCall(ISDestroy(&iscoltmp));
  if (*newmat && cll == MAT_INITIAL_MATRIX) PetscCall(PetscObjectStateIncrease((PetscObject)*newmat));
  PetscFunctionReturn(0);
}
8277 
8278 /*@
8279    MatPropagateSymmetryOptions - Propagates symmetry options set on a matrix to another matrix
8280 
8281    Not Collective
8282 
8283    Input Parameters:
8284 +  A - the matrix we wish to propagate options from
8285 -  B - the matrix we wish to propagate options to
8286 
8287    Level: beginner
8288 
8289    Notes: Propagates the options associated to MAT_SYMMETRY_ETERNAL, MAT_STRUCTURALLY_SYMMETRIC, MAT_HERMITIAN, MAT_SPD and MAT_SYMMETRIC
8290 
8291 .seealso: `MatSetOption()`
8292 @*/
8293 PetscErrorCode MatPropagateSymmetryOptions(Mat A, Mat B)
8294 {
8295   PetscFunctionBegin;
8296   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8297   PetscValidHeaderSpecific(B,MAT_CLASSID,2);
8298   if (A->symmetric_eternal) { /* symmetric_eternal does not have a corresponding *set flag */
8299     PetscCall(MatSetOption(B,MAT_SYMMETRY_ETERNAL,A->symmetric_eternal));
8300   }
8301   if (A->structurally_symmetric_set) {
8302     PetscCall(MatSetOption(B,MAT_STRUCTURALLY_SYMMETRIC,A->structurally_symmetric));
8303   }
8304   if (A->hermitian_set) {
8305     PetscCall(MatSetOption(B,MAT_HERMITIAN,A->hermitian));
8306   }
8307   if (A->spd_set) {
8308     PetscCall(MatSetOption(B,MAT_SPD,A->spd));
8309   }
8310   if (A->symmetric_set) {
8311     PetscCall(MatSetOption(B,MAT_SYMMETRIC,A->symmetric));
8312   }
8313   PetscFunctionReturn(0);
8314 }
8315 
8316 /*@
8317    MatStashSetInitialSize - sets the sizes of the matrix stash, that is
8318    used during the assembly process to store values that belong to
8319    other processors.
8320 
8321    Not Collective
8322 
8323    Input Parameters:
8324 +  mat   - the matrix
8325 .  size  - the initial size of the stash.
8326 -  bsize - the initial size of the block-stash(if used).
8327 
8328    Options Database Keys:
8329 +   -matstash_initial_size <size> or <size0,size1,...sizep-1>
8330 -   -matstash_block_initial_size <bsize>  or <bsize0,bsize1,...bsizep-1>
8331 
8332    Level: intermediate
8333 
8334    Notes:
8335      The block-stash is used for values set with MatSetValuesBlocked() while
8336      the stash is used for values set with MatSetValues()
8337 
8338      Run with the option -info and look for output of the form
8339      MatAssemblyBegin_MPIXXX:Stash has MM entries, uses nn mallocs.
8340      to determine the appropriate value, MM, to use for size and
8341      MatAssemblyBegin_MPIXXX:Block-Stash has BMM entries, uses nn mallocs.
8342      to determine the value, BMM to use for bsize
8343 
8344 .seealso: `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashGetInfo()`
8345 
8346 @*/
PetscErrorCode MatStashSetInitialSize(Mat mat,PetscInt size, PetscInt bsize)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* the scalar stash buffers off-process entries set via MatSetValues(),
     the block stash those set via MatSetValuesBlocked() */
  PetscCall(MatStashSetInitialSize_Private(&mat->stash,size));
  PetscCall(MatStashSetInitialSize_Private(&mat->bstash,bsize));
  PetscFunctionReturn(0);
}
8356 
8357 /*@
8358    MatInterpolateAdd - w = y + A*x or A'*x depending on the shape of
8359      the matrix
8360 
8361    Neighbor-wise Collective on Mat
8362 
   Input Parameters:
+  A   - the matrix
.  x,y - the vectors
-  w - where the result is stored
8367 
8368    Level: intermediate
8369 
8370    Notes:
8371     w may be the same vector as y.
8372 
8373     This allows one to use either the restriction or interpolation (its transpose)
8374     matrix to do the interpolation
8375 
8376 .seealso: `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`
8377 
8378 @*/
8379 PetscErrorCode MatInterpolateAdd(Mat A,Vec x,Vec y,Vec w)
8380 {
8381   PetscInt       M,N,Ny;
8382 
8383   PetscFunctionBegin;
8384   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8385   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8386   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8387   PetscValidHeaderSpecific(w,VEC_CLASSID,4);
8388   PetscCall(MatGetSize(A,&M,&N));
8389   PetscCall(VecGetSize(y,&Ny));
8390   if (M == Ny) {
8391     PetscCall(MatMultAdd(A,x,y,w));
8392   } else {
8393     PetscCall(MatMultTransposeAdd(A,x,y,w));
8394   }
8395   PetscFunctionReturn(0);
8396 }
8397 
8398 /*@
8399    MatInterpolate - y = A*x or A'*x depending on the shape of
8400      the matrix
8401 
8402    Neighbor-wise Collective on Mat
8403 
   Input Parameters:
+  A - the matrix
-  x - the input vector

   Output Parameter:
.  y - the output vector
8407 
8408    Level: intermediate
8409 
8410    Notes:
8411     This allows one to use either the restriction or interpolation (its transpose)
8412     matrix to do the interpolation
8413 
8414 .seealso: `MatMultAdd()`, `MatMultTransposeAdd()`, `MatRestrict()`
8415 
8416 @*/
8417 PetscErrorCode MatInterpolate(Mat A,Vec x,Vec y)
8418 {
8419   PetscInt       M,N,Ny;
8420 
8421   PetscFunctionBegin;
8422   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8423   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8424   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8425   PetscCall(MatGetSize(A,&M,&N));
8426   PetscCall(VecGetSize(y,&Ny));
8427   if (M == Ny) {
8428     PetscCall(MatMult(A,x,y));
8429   } else {
8430     PetscCall(MatMultTranspose(A,x,y));
8431   }
8432   PetscFunctionReturn(0);
8433 }
8434 
8435 /*@
8436    MatRestrict - y = A*x or A'*x
8437 
8438    Neighbor-wise Collective on Mat
8439 
   Input Parameters:
+  A - the matrix
-  x - the input vector

   Output Parameter:
.  y - the output vector
8443 
8444    Level: intermediate
8445 
8446    Notes:
8447     This allows one to use either the restriction or interpolation (its transpose)
8448     matrix to do the restriction
8449 
8450 .seealso: `MatMultAdd()`, `MatMultTransposeAdd()`, `MatInterpolate()`
8451 
8452 @*/
8453 PetscErrorCode MatRestrict(Mat A,Vec x,Vec y)
8454 {
8455   PetscInt       M,N,Ny;
8456 
8457   PetscFunctionBegin;
8458   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8459   PetscValidHeaderSpecific(x,VEC_CLASSID,2);
8460   PetscValidHeaderSpecific(y,VEC_CLASSID,3);
8461   PetscCall(MatGetSize(A,&M,&N));
8462   PetscCall(VecGetSize(y,&Ny));
8463   if (M == Ny) {
8464     PetscCall(MatMult(A,x,y));
8465   } else {
8466     PetscCall(MatMultTranspose(A,x,y));
8467   }
8468   PetscFunctionReturn(0);
8469 }
8470 
8471 /*@
8472    MatMatInterpolateAdd - Y = W + A*X or W + A'*X
8473 
8474    Neighbor-wise Collective on Mat
8475 
   Input Parameters:
+  A - the matrix
.  x - the input dense matrix
-  w - the dense matrix added to the product (may be NULL)
8479 
8480    Output Parameters:
8481 .  y - the output dense matrix
8482 
8483    Level: intermediate
8484 
8485    Notes:
8486     This allows one to use either the restriction or interpolation (its transpose)
8487     matrix to do the interpolation. y matrix can be reused if already created with the proper sizes,
8488     otherwise it will be recreated. y must be initialized to NULL if not supplied.
8489 
8490 .seealso: `MatInterpolateAdd()`, `MatMatInterpolate()`, `MatMatRestrict()`
8491 
8492 @*/
PetscErrorCode MatMatInterpolateAdd(Mat A,Mat x,Mat w,Mat *y)
{
  PetscInt       M,N,Mx,Nx,Mo,My = 0,Ny = 0;
  PetscBool      trans = PETSC_TRUE;
  MatReuse       reuse = MAT_INITIAL_MATRIX;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(x,MAT_CLASSID,2);
  PetscValidType(x,2);
  if (w) PetscValidHeaderSpecific(w,MAT_CLASSID,3);
  if (*y) PetscValidHeaderSpecific(*y,MAT_CLASSID,4);
  /* pick Y = A*X when the sizes conform (N == Mx), otherwise Y = A'*X (requires M == Mx) */
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetSize(x,&Mx,&Nx));
  if (N == Mx) trans = PETSC_FALSE;
  else PetscCheck(M == Mx,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT,M,N,Mx,Nx);
  Mo = trans ? N : M; /* global row count of the product */
  /* reuse *y only when it already has the product's dimensions; otherwise rebuild it */
  if (*y) {
    PetscCall(MatGetSize(*y,&My,&Ny));
    if (Mo == My && Nx == Ny) { reuse = MAT_REUSE_MATRIX; }
    else {
      PetscCheck(w || *y != w,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Cannot reuse y and w, size mismatch: A %" PetscInt_FMT "x%" PetscInt_FMT ", X %" PetscInt_FMT "x%" PetscInt_FMT ", Y %" PetscInt_FMT "x%" PetscInt_FMT,M,N,Mx,Nx,My,Ny);
      PetscCall(MatDestroy(y));
    }
  }

  if (w && *y == w) { /* this is to minimize changes in PCMG */
    PetscBool flg;

    /* When the caller aliases w and *y, keep a private copy of w composed on *y
       (key "__MatMatIntAdd_w") so the add term survives the product overwriting *y.
       Reuse the stashed copy only while its type and sizes still match *y. */
    PetscCall(PetscObjectQuery((PetscObject)*y,"__MatMatIntAdd_w",(PetscObject*)&w));
    if (w) {
      PetscInt My,Ny,Mw,Nw;

      PetscCall(PetscObjectTypeCompare((PetscObject)*y,((PetscObject)w)->type_name,&flg));
      PetscCall(MatGetSize(*y,&My,&Ny));
      PetscCall(MatGetSize(w,&Mw,&Nw));
      if (!flg || My != Mw || Ny != Nw) w = NULL; /* stale stash: force a fresh duplicate */
    }
    if (!w) {
      PetscCall(MatDuplicate(*y,MAT_COPY_VALUES,&w));
      PetscCall(PetscObjectCompose((PetscObject)*y,"__MatMatIntAdd_w",(PetscObject)w));
      PetscCall(PetscLogObjectParent((PetscObject)*y,(PetscObject)w));
      /* the composition above keeps the only reference needed */
      PetscCall(PetscObjectDereference((PetscObject)w));
    } else {
      PetscCall(MatCopy(*y,w,UNKNOWN_NONZERO_PATTERN));
    }
  }
  if (!trans) {
    PetscCall(MatMatMult(A,x,reuse,PETSC_DEFAULT,y));
  } else {
    PetscCall(MatTransposeMatMult(A,x,reuse,PETSC_DEFAULT,y));
  }
  if (w) {
    /* accumulate the add term: Y += W */
    PetscCall(MatAXPY(*y,1.0,w,UNKNOWN_NONZERO_PATTERN));
  }
  PetscFunctionReturn(0);
}
8550 
8551 /*@
8552    MatMatInterpolate - Y = A*X or A'*X
8553 
8554    Neighbor-wise Collective on Mat
8555 
8556    Input Parameters:
8557 +  mat   - the matrix
8558 -  x - the input dense matrix
8559 
8560    Output Parameters:
8561 .  y - the output dense matrix
8562 
8563    Level: intermediate
8564 
8565    Notes:
8566     This allows one to use either the restriction or interpolation (its transpose)
8567     matrix to do the interpolation. y matrix can be reused if already created with the proper sizes,
8568     otherwise it will be recreated. y must be initialized to NULL if not supplied.
8569 
8570 .seealso: `MatInterpolate()`, `MatRestrict()`, `MatMatRestrict()`
8571 
8572 @*/
PetscErrorCode MatMatInterpolate(Mat A,Mat x,Mat *y)
{
  PetscFunctionBegin;
  /* a NULL w reduces the add-variant to plain Y = A*X (or A'*X) */
  PetscCall(MatMatInterpolateAdd(A,x,NULL,y));
  PetscFunctionReturn(0);
}
8579 
8580 /*@
8581    MatMatRestrict - Y = A*X or A'*X
8582 
8583    Neighbor-wise Collective on Mat
8584 
8585    Input Parameters:
8586 +  mat   - the matrix
8587 -  x - the input dense matrix
8588 
8589    Output Parameters:
8590 .  y - the output dense matrix
8591 
8592    Level: intermediate
8593 
8594    Notes:
8595     This allows one to use either the restriction or interpolation (its transpose)
8596     matrix to do the restriction. y matrix can be reused if already created with the proper sizes,
8597     otherwise it will be recreated. y must be initialized to NULL if not supplied.
8598 
8599 .seealso: `MatRestrict()`, `MatInterpolate()`, `MatMatInterpolate()`
8600 @*/
PetscErrorCode MatMatRestrict(Mat A,Mat x,Mat *y)
{
  PetscFunctionBegin;
  /* a NULL w reduces the add-variant to plain Y = A*X (or A'*X) */
  PetscCall(MatMatInterpolateAdd(A,x,NULL,y));
  PetscFunctionReturn(0);
}
8607 
8608 /*@
8609    MatGetNullSpace - retrieves the null space of a matrix.
8610 
8611    Logically Collective on Mat
8612 
   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  nullsp - the null space object, NULL if not set
8616 
8617    Level: developer
8618 
8619 .seealso: `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetNullSpace()`
8620 @*/
8621 PetscErrorCode MatGetNullSpace(Mat mat, MatNullSpace *nullsp)
8622 {
8623   PetscFunctionBegin;
8624   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8625   PetscValidPointer(nullsp,2);
8626   *nullsp = (mat->symmetric_set && mat->symmetric && !mat->nullsp) ? mat->transnullsp : mat->nullsp;
8627   PetscFunctionReturn(0);
8628 }
8629 
8630 /*@
8631    MatSetNullSpace - attaches a null space to a matrix.
8632 
8633    Logically Collective on Mat
8634 
8635    Input Parameters:
8636 +  mat - the matrix
8637 -  nullsp - the null space object
8638 
8639    Level: advanced
8640 
8641    Notes:
8642       This null space is used by the KSP linear solvers to solve singular systems.
8643 
8644       Overwrites any previous null space that may have been attached. You can remove the null space from the matrix object by calling this routine with an nullsp of NULL
8645 
8646       For inconsistent singular systems (linear systems where the right hand side is not in the range of the operator) the KSP residuals will not converge to
8647       to zero but the linear system will still be solved in a least squares sense.
8648 
8649       The fundamental theorem of linear algebra (Gilbert Strang, Introduction to Applied Mathematics, page 72) states that
8650    the domain of a matrix A (from R^n to R^m (m rows, n columns) R^n = the direct sum of the null space of A, n(A), + the range of A^T, R(A^T).
8651    Similarly R^m = direct sum n(A^T) + R(A).  Hence the linear system A x = b has a solution only if b in R(A) (or correspondingly b is orthogonal to
8652    n(A^T)) and if x is a solution then x + alpha n(A) is a solution for any alpha. The minimum norm solution is orthogonal to n(A). For problems without a solution
8653    the solution that minimizes the norm of the residual (the least squares solution) can be obtained by solving A x = \hat{b} where \hat{b} is b orthogonalized to the n(A^T).
8654    This  \hat{b} can be obtained by calling MatNullSpaceRemove() with the null space of the transpose of the matrix.
8655 
8656     If the matrix is known to be symmetric because it is an SBAIJ matrix or one as called MatSetOption(mat,MAT_SYMMETRIC or MAT_SYMMETRIC_ETERNAL,PETSC_TRUE); this
8657     routine also automatically calls MatSetTransposeNullSpace().
8658 
8659     The user should call `MatNullSpaceDestroy()`.
8660 
8661 .seealso: `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetTransposeNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`,
8662           `KSPSetPCSide()`
8663 @*/
8664 PetscErrorCode MatSetNullSpace(Mat mat,MatNullSpace nullsp)
8665 {
8666   PetscFunctionBegin;
8667   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8668   if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8669   if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8670   PetscCall(MatNullSpaceDestroy(&mat->nullsp));
8671   mat->nullsp = nullsp;
8672   if (mat->symmetric_set && mat->symmetric) {
8673     PetscCall(MatSetTransposeNullSpace(mat,nullsp));
8674   }
8675   PetscFunctionReturn(0);
8676 }
8677 
8678 /*@
8679    MatGetTransposeNullSpace - retrieves the null space of the transpose of a matrix.
8680 
8681    Logically Collective on Mat
8682 
   Input Parameter:
.  mat - the matrix

   Output Parameter:
.  nullsp - the null space object, NULL if not set
8686 
8687    Level: developer
8688 
8689 .seealso: `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatSetTransposeNullSpace()`, `MatSetNullSpace()`, `MatGetNullSpace()`
8690 @*/
8691 PetscErrorCode MatGetTransposeNullSpace(Mat mat, MatNullSpace *nullsp)
8692 {
8693   PetscFunctionBegin;
8694   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8695   PetscValidType(mat,1);
8696   PetscValidPointer(nullsp,2);
8697   *nullsp = (mat->symmetric_set && mat->symmetric && !mat->transnullsp) ? mat->nullsp : mat->transnullsp;
8698   PetscFunctionReturn(0);
8699 }
8700 
8701 /*@
8702    MatSetTransposeNullSpace - attaches the null space of a transpose of a matrix to the matrix
8703 
8704    Logically Collective on Mat
8705 
8706    Input Parameters:
8707 +  mat - the matrix
8708 -  nullsp - the null space object
8709 
8710    Level: advanced
8711 
8712    Notes:
8713       This allows solving singular linear systems defined by the transpose of the matrix using KSP solvers with left preconditioning.
8714 
8715       See MatSetNullSpace()
8716 
8717 .seealso: `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatSetNullSpace()`, `MatGetTransposeNullSpace()`, `MatNullSpaceRemove()`, `KSPSetPCSide()`
8718 @*/
8719 PetscErrorCode MatSetTransposeNullSpace(Mat mat,MatNullSpace nullsp)
8720 {
8721   PetscFunctionBegin;
8722   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8723   if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
8724   if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
8725   PetscCall(MatNullSpaceDestroy(&mat->transnullsp));
8726   mat->transnullsp = nullsp;
8727   PetscFunctionReturn(0);
8728 }
8729 
8730 /*@
8731    MatSetNearNullSpace - attaches a null space to a matrix, which is often the null space (rigid body modes) of the operator without boundary conditions
8732         This null space will be used to provide near null space vectors to a multigrid preconditioner built from this matrix.
8733 
8734    Logically Collective on Mat
8735 
8736    Input Parameters:
8737 +  mat - the matrix
8738 -  nullsp - the null space object
8739 
8740    Level: advanced
8741 
8742    Notes:
8743       Overwrites any previous near null space that may have been attached
8744 
8745       You can remove the null space by calling this routine with an nullsp of NULL
8746 
8747 .seealso: `MatCreate()`, `MatNullSpaceCreate()`, `MatSetNullSpace()`, `MatNullSpaceCreateRigidBody()`, `MatGetNearNullSpace()`
8748 @*/
PetscErrorCode MatSetNearNullSpace(Mat mat,MatNullSpace nullsp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (nullsp) PetscValidHeaderSpecific(nullsp,MAT_NULLSPACE_CLASSID,2);
  MatCheckPreallocated(mat,1);
  /* reference the new space before destroying the old one, in case they are the same object */
  if (nullsp) PetscCall(PetscObjectReference((PetscObject)nullsp));
  PetscCall(MatNullSpaceDestroy(&mat->nearnullsp));
  mat->nearnullsp = nullsp;
  PetscFunctionReturn(0);
}
8761 
8762 /*@
8763    MatGetNearNullSpace - Get null space attached with MatSetNearNullSpace()
8764 
8765    Not Collective
8766 
8767    Input Parameter:
8768 .  mat - the matrix
8769 
8770    Output Parameter:
8771 .  nullsp - the null space object, NULL if not set
8772 
8773    Level: developer
8774 
8775 .seealso: `MatSetNearNullSpace()`, `MatGetNullSpace()`, `MatNullSpaceCreate()`
8776 @*/
PetscErrorCode MatGetNearNullSpace(Mat mat,MatNullSpace *nullsp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  PetscValidPointer(nullsp,2);
  MatCheckPreallocated(mat,1);
  /* borrowed reference: caller must not destroy the returned null space */
  *nullsp = mat->nearnullsp;
  PetscFunctionReturn(0);
}
8787 
8788 /*@C
8789    MatICCFactor - Performs in-place incomplete Cholesky factorization of matrix.
8790 
8791    Collective on Mat
8792 
   Input Parameters:
+  mat - the matrix
.  row - row/column permutation
-  info - information on desired factorization process, including the expected
          fill factor (>= 1.0) and the level of fill for ICC(k)
8798 
8799    Notes:
8800    Probably really in-place only when level of fill is zero, otherwise allocates
8801    new space to store factored matrix and deletes previous memory.
8802 
8803    Most users should employ the simplified KSP interface for linear solvers
8804    instead of working directly with matrix algebra routines such as this.
8805    See, e.g., KSPCreate().
8806 
8807    Level: developer
8808 
8809 .seealso: `MatICCFactorSymbolic()`, `MatLUFactorNumeric()`, `MatCholeskyFactor()`
8810 
8811     Developer Note: fortran interface is not autogenerated as the f90
8812     interface definition cannot be generated correctly [due to MatFactorInfo]
8813 
8814 @*/
PetscErrorCode MatICCFactor(Mat mat,IS row,const MatFactorInfo *info)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  if (row) PetscValidHeaderSpecific(row,IS_CLASSID,2);
  PetscValidPointer(info,3);
  /* incomplete Cholesky requires a square, assembled, not-yet-factored matrix */
  PetscCheck(mat->rmap->N == mat->cmap->N,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONG,"matrix must be square");
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  PetscCheck(mat->ops->iccfactor,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  /* dispatch to the type-specific in-place ICC implementation */
  PetscCall((*mat->ops->iccfactor)(mat,row,info));
  PetscCall(PetscObjectStateIncrease((PetscObject)mat));
  PetscFunctionReturn(0);
}
8831 
8832 /*@
8833    MatDiagonalScaleLocal - Scales columns of a matrix given the scaling values including the
8834          ghosted ones.
8835 
8836    Not Collective
8837 
8838    Input Parameters:
8839 +  mat - the matrix
8840 -  diag - the diagonal values, including ghost ones
8841 
8842    Level: developer
8843 
8844    Notes:
8845     Works only for MPIAIJ and MPIBAIJ matrices
8846 
8847 .seealso: `MatDiagonalScale()`
8848 @*/
8849 PetscErrorCode MatDiagonalScaleLocal(Mat mat,Vec diag)
8850 {
8851   PetscMPIInt    size;
8852 
8853   PetscFunctionBegin;
8854   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
8855   PetscValidHeaderSpecific(diag,VEC_CLASSID,2);
8856   PetscValidType(mat,1);
8857 
8858   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must be already assembled");
8859   PetscCall(PetscLogEventBegin(MAT_Scale,mat,0,0,0));
8860   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
8861   if (size == 1) {
8862     PetscInt n,m;
8863     PetscCall(VecGetSize(diag,&n));
8864     PetscCall(MatGetSize(mat,NULL,&m));
8865     if (m == n) {
8866       PetscCall(MatDiagonalScale(mat,NULL,diag));
8867     } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only supported for sequential matrices when no ghost points/periodic conditions");
8868   } else {
8869     PetscUseMethod(mat,"MatDiagonalScaleLocal_C",(Mat,Vec),(mat,diag));
8870   }
8871   PetscCall(PetscLogEventEnd(MAT_Scale,mat,0,0,0));
8872   PetscCall(PetscObjectStateIncrease((PetscObject)mat));
8873   PetscFunctionReturn(0);
8874 }
8875 
8876 /*@
8877    MatGetInertia - Gets the inertia from a factored matrix
8878 
8879    Collective on Mat
8880 
8881    Input Parameter:
8882 .  mat - the matrix
8883 
8884    Output Parameters:
8885 +   nneg - number of negative eigenvalues
8886 .   nzero - number of zero eigenvalues
8887 -   npos - number of positive eigenvalues
8888 
8889    Level: advanced
8890 
8891    Notes:
8892     Matrix must have been factored by MatCholeskyFactor()
8893 
8894 @*/
PetscErrorCode MatGetInertia(Mat mat,PetscInt *nneg,PetscInt *nzero,PetscInt *npos)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* inertia is only available from an assembled, factored matrix */
  PetscCheck(mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
  PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Numeric factor mat is not assembled");
  PetscCheck(mat->ops->getinertia,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  /* dispatch to the factorization-specific implementation */
  PetscCall((*mat->ops->getinertia)(mat,nneg,nzero,npos));
  PetscFunctionReturn(0);
}
8906 
8907 /* ----------------------------------------------------------------*/
8908 /*@C
8909    MatSolves - Solves A x = b, given a factored matrix, for a collection of vectors
8910 
8911    Neighbor-wise Collective on Mats
8912 
8913    Input Parameters:
8914 +  mat - the factored matrix
8915 -  b - the right-hand-side vectors
8916 
8917    Output Parameter:
8918 .  x - the result vectors
8919 
   Notes:
   The vectors b and x cannot be the same.  I.e., one cannot
   call MatSolves(A,x,x).

   Most users should employ the simplified KSP interface for linear solvers
   instead of working directly with matrix algebra routines such as this.
   See, e.g., KSPCreate().
8928 
8929    Level: developer
8930 
8931 .seealso: `MatSolveAdd()`, `MatSolveTranspose()`, `MatSolveTransposeAdd()`, `MatSolve()`
8932 @*/
PetscErrorCode MatSolves(Mat mat,Vecs b,Vecs x)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscValidType(mat,1);
  /* in-place solves are not supported: b and x must be distinct */
  PetscCheck(x != b,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_IDN,"x and b must be different vectors");
  PetscCheck(mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Unfactored matrix");
  /* nothing to do for a globally 0x0 matrix */
  if (!mat->rmap->N && !mat->cmap->N) PetscFunctionReturn(0);

  PetscCheck(mat->ops->solves,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)mat)->type_name);
  MatCheckPreallocated(mat,1);
  PetscCall(PetscLogEventBegin(MAT_Solves,mat,0,0,0));
  PetscCall((*mat->ops->solves)(mat,b,x));
  PetscCall(PetscLogEventEnd(MAT_Solves,mat,0,0,0));
  PetscFunctionReturn(0);
}
8949 
8950 /*@
8951    MatIsSymmetric - Test whether a matrix is symmetric
8952 
8953    Collective on Mat
8954 
8955    Input Parameters:
8956 +  A - the matrix to test
8957 -  tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact transpose)
8958 
8959    Output Parameters:
8960 .  flg - the result
8961 
8962    Notes:
8963     For real numbers MatIsSymmetric() and MatIsHermitian() return identical results
8964 
8965    Level: intermediate
8966 
8967 .seealso: `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetricKnown()`
8968 @*/
8969 PetscErrorCode MatIsSymmetric(Mat A,PetscReal tol,PetscBool *flg)
8970 {
8971   PetscFunctionBegin;
8972   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
8973   PetscValidBoolPointer(flg,3);
8974 
8975   if (!A->symmetric_set) {
8976     if (!A->ops->issymmetric) {
8977       MatType mattype;
8978       PetscCall(MatGetType(A,&mattype));
8979       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type %s does not support checking for symmetric",mattype);
8980     }
8981     PetscCall((*A->ops->issymmetric)(A,tol,flg));
8982     if (!tol) {
8983       PetscCall(MatSetOption(A,MAT_SYMMETRIC,*flg));
8984     }
8985   } else if (A->symmetric) {
8986     *flg = PETSC_TRUE;
8987   } else if (!tol) {
8988     *flg = PETSC_FALSE;
8989   } else {
8990     if (!A->ops->issymmetric) {
8991       MatType mattype;
8992       PetscCall(MatGetType(A,&mattype));
8993       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type %s does not support checking for symmetric",mattype);
8994     }
8995     PetscCall((*A->ops->issymmetric)(A,tol,flg));
8996   }
8997   PetscFunctionReturn(0);
8998 }
8999 
9000 /*@
9001    MatIsHermitian - Test whether a matrix is Hermitian
9002 
9003    Collective on Mat
9004 
9005    Input Parameters:
9006 +  A - the matrix to test
9007 -  tol - difference between value and its transpose less than this amount counts as equal (use 0.0 for exact Hermitian)
9008 
9009    Output Parameters:
9010 .  flg - the result
9011 
9012    Level: intermediate
9013 
9014 .seealso: `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`,
9015           `MatIsSymmetricKnown()`, `MatIsSymmetric()`
9016 @*/
9017 PetscErrorCode MatIsHermitian(Mat A,PetscReal tol,PetscBool *flg)
9018 {
9019   PetscFunctionBegin;
9020   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9021   PetscValidBoolPointer(flg,3);
9022 
9023   if (!A->hermitian_set) {
9024     if (!A->ops->ishermitian) {
9025       MatType mattype;
9026       PetscCall(MatGetType(A,&mattype));
9027       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type %s does not support checking for hermitian",mattype);
9028     }
9029     PetscCall((*A->ops->ishermitian)(A,tol,flg));
9030     if (!tol) {
9031       PetscCall(MatSetOption(A,MAT_HERMITIAN,*flg));
9032     }
9033   } else if (A->hermitian) {
9034     *flg = PETSC_TRUE;
9035   } else if (!tol) {
9036     *flg = PETSC_FALSE;
9037   } else {
9038     if (!A->ops->ishermitian) {
9039       MatType mattype;
9040       PetscCall(MatGetType(A,&mattype));
9041       SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix of type %s does not support checking for hermitian",mattype);
9042     }
9043     PetscCall((*A->ops->ishermitian)(A,tol,flg));
9044   }
9045   PetscFunctionReturn(0);
9046 }
9047 
9048 /*@
9049    MatIsSymmetricKnown - Checks the flag on the matrix to see if it is symmetric.
9050 
9051    Not Collective
9052 
9053    Input Parameter:
9054 .  A - the matrix to check
9055 
9056    Output Parameters:
9057 +  set - if the symmetric flag is set (this tells you if the next flag is valid)
9058 -  flg - the result
9059 
9060    Level: advanced
9061 
9062    Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsSymmetric()
9063          if you want it explicitly checked
9064 
9065 .seealso: `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9066 @*/
9067 PetscErrorCode MatIsSymmetricKnown(Mat A,PetscBool *set,PetscBool *flg)
9068 {
9069   PetscFunctionBegin;
9070   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9071   PetscValidBoolPointer(set,2);
9072   PetscValidBoolPointer(flg,3);
9073   if (A->symmetric_set) {
9074     *set = PETSC_TRUE;
9075     *flg = A->symmetric;
9076   } else {
9077     *set = PETSC_FALSE;
9078   }
9079   PetscFunctionReturn(0);
9080 }
9081 
9082 /*@
9083    MatIsHermitianKnown - Checks the flag on the matrix to see if it is hermitian.
9084 
9085    Not Collective
9086 
9087    Input Parameter:
9088 .  A - the matrix to check
9089 
9090    Output Parameters:
9091 +  set - if the hermitian flag is set (this tells you if the next flag is valid)
9092 -  flg - the result
9093 
9094    Level: advanced
9095 
9096    Note: Does not check the matrix values directly, so this may return unknown (set = PETSC_FALSE). Use MatIsHermitian()
9097          if you want it explicitly checked
9098 
9099 .seealso: `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsStructurallySymmetric()`, `MatSetOption()`, `MatIsSymmetric()`
9100 @*/
9101 PetscErrorCode MatIsHermitianKnown(Mat A,PetscBool *set,PetscBool *flg)
9102 {
9103   PetscFunctionBegin;
9104   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9105   PetscValidBoolPointer(set,2);
9106   PetscValidBoolPointer(flg,3);
9107   if (A->hermitian_set) {
9108     *set = PETSC_TRUE;
9109     *flg = A->hermitian;
9110   } else {
9111     *set = PETSC_FALSE;
9112   }
9113   PetscFunctionReturn(0);
9114 }
9115 
9116 /*@
9117    MatIsStructurallySymmetric - Test whether a matrix is structurally symmetric
9118 
9119    Collective on Mat
9120 
9121    Input Parameter:
9122 .  A - the matrix to test
9123 
9124    Output Parameters:
9125 .  flg - the result
9126 
9127    Level: intermediate
9128 
9129 .seealso: `MatTranspose()`, `MatIsTranspose()`, `MatIsHermitian()`, `MatIsSymmetric()`, `MatSetOption()`
9130 @*/
9131 PetscErrorCode MatIsStructurallySymmetric(Mat A,PetscBool *flg)
9132 {
9133   PetscFunctionBegin;
9134   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
9135   PetscValidBoolPointer(flg,2);
9136   if (!A->structurally_symmetric_set) {
9137     PetscCheck(A->ops->isstructurallysymmetric,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Matrix of type %s does not support checking for structural symmetric",((PetscObject)A)->type_name);
9138     PetscCall((*A->ops->isstructurallysymmetric)(A,flg));
9139     PetscCall(MatSetOption(A,MAT_STRUCTURALLY_SYMMETRIC,*flg));
9140   } else *flg = A->structurally_symmetric;
9141   PetscFunctionReturn(0);
9142 }
9143 
9144 /*@
9145    MatStashGetInfo - Gets how many values are currently in the matrix stash, i.e. need
9146        to be communicated to other processors during the MatAssemblyBegin/End() process
9147 
9148     Not collective
9149 
9150    Input Parameter:
9151 .   vec - the vector
9152 
9153    Output Parameters:
9154 +   nstash   - the size of the stash
9155 .   reallocs - the number of additional mallocs incurred.
9156 .   bnstash   - the size of the block stash
9157 -   breallocs - the number of additional mallocs incurred.in the block stash
9158 
9159    Level: advanced
9160 
9161 .seealso: `MatAssemblyBegin()`, `MatAssemblyEnd()`, `Mat`, `MatStashSetInitialSize()`
9162 
9163 @*/
9164 PetscErrorCode MatStashGetInfo(Mat mat,PetscInt *nstash,PetscInt *reallocs,PetscInt *bnstash,PetscInt *breallocs)
9165 {
9166   PetscFunctionBegin;
9167   PetscCall(MatStashGetInfo_Private(&mat->stash,nstash,reallocs));
9168   PetscCall(MatStashGetInfo_Private(&mat->bstash,bnstash,breallocs));
9169   PetscFunctionReturn(0);
9170 }
9171 
9172 /*@C
9173    MatCreateVecs - Get vector(s) compatible with the matrix, i.e. with the same
9174      parallel layout
9175 
9176    Collective on Mat
9177 
9178    Input Parameter:
9179 .  mat - the matrix
9180 
9181    Output Parameters:
9182 +   right - (optional) vector that the matrix can be multiplied against
9183 -   left - (optional) vector that the matrix vector product can be stored in
9184 
9185    Notes:
9186     The blocksize of the returned vectors is determined by the row and column block sizes set with MatSetBlockSizes() or the single blocksize (same for both) set by MatSetBlockSize().
9187 
9188   Notes:
9189     These are new vectors which are not owned by the Mat, they should be destroyed in VecDestroy() when no longer needed
9190 
9191   Level: advanced
9192 
9193 .seealso: `MatCreate()`, `VecDestroy()`
9194 @*/
9195 PetscErrorCode MatCreateVecs(Mat mat,Vec *right,Vec *left)
9196 {
9197   PetscFunctionBegin;
9198   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
9199   PetscValidType(mat,1);
9200   if (mat->ops->getvecs) {
9201     PetscCall((*mat->ops->getvecs)(mat,right,left));
9202   } else {
9203     PetscInt rbs,cbs;
9204     PetscCall(MatGetBlockSizes(mat,&rbs,&cbs));
9205     if (right) {
9206       PetscCheck(mat->cmap->n >= 0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for columns not yet setup");
9207       PetscCall(VecCreate(PetscObjectComm((PetscObject)mat),right));
9208       PetscCall(VecSetSizes(*right,mat->cmap->n,PETSC_DETERMINE));
9209       PetscCall(VecSetBlockSize(*right,cbs));
9210       PetscCall(VecSetType(*right,mat->defaultvectype));
9211 #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
9212       if (mat->boundtocpu && mat->bindingpropagates) {
9213         PetscCall(VecSetBindingPropagates(*right,PETSC_TRUE));
9214         PetscCall(VecBindToCPU(*right,PETSC_TRUE));
9215       }
9216 #endif
9217       PetscCall(PetscLayoutReference(mat->cmap,&(*right)->map));
9218     }
9219     if (left) {
9220       PetscCheck(mat->rmap->n >= 0,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"PetscLayout for rows not yet setup");
9221       PetscCall(VecCreate(PetscObjectComm((PetscObject)mat),left));
9222       PetscCall(VecSetSizes(*left,mat->rmap->n,PETSC_DETERMINE));
9223       PetscCall(VecSetBlockSize(*left,rbs));
9224       PetscCall(VecSetType(*left,mat->defaultvectype));
9225 #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
9226       if (mat->boundtocpu && mat->bindingpropagates) {
9227         PetscCall(VecSetBindingPropagates(*left,PETSC_TRUE));
9228         PetscCall(VecBindToCPU(*left,PETSC_TRUE));
9229       }
9230 #endif
9231       PetscCall(PetscLayoutReference(mat->rmap,&(*left)->map));
9232     }
9233   }
9234   PetscFunctionReturn(0);
9235 }
9236 
9237 /*@C
9238    MatFactorInfoInitialize - Initializes a MatFactorInfo data structure
9239      with default values.
9240 
9241    Not Collective
9242 
9243    Input Parameters:
9244 .    info - the MatFactorInfo data structure
9245 
9246    Notes:
9247     The solvers are generally used through the KSP and PC objects, for example
9248           PCLU, PCILU, PCCHOLESKY, PCICC
9249 
9250    Level: developer
9251 
9252 .seealso: `MatFactorInfo`
9253 
9254     Developer Note: fortran interface is not autogenerated as the f90
9255     interface definition cannot be generated correctly [due to MatFactorInfo]
9256 
9257 @*/
9258 
9259 PetscErrorCode MatFactorInfoInitialize(MatFactorInfo *info)
9260 {
9261   PetscFunctionBegin;
9262   PetscCall(PetscMemzero(info,sizeof(MatFactorInfo)));
9263   PetscFunctionReturn(0);
9264 }
9265 
9266 /*@
9267    MatFactorSetSchurIS - Set indices corresponding to the Schur complement you wish to have computed
9268 
9269    Collective on Mat
9270 
9271    Input Parameters:
9272 +  mat - the factored matrix
9273 -  is - the index set defining the Schur indices (0-based)
9274 
9275    Notes:
9276     Call MatFactorSolveSchurComplement() or MatFactorSolveSchurComplementTranspose() after this call to solve a Schur complement system.
9277 
9278    You can call MatFactorGetSchurComplement() or MatFactorCreateSchurComplement() after this call.
9279 
9280    Level: developer
9281 
9282 .seealso: `MatGetFactor()`, `MatFactorGetSchurComplement()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSolveSchurComplement()`,
9283           `MatFactorSolveSchurComplementTranspose()`, `MatFactorSolveSchurComplement()`
9284 
9285 @*/
9286 PetscErrorCode MatFactorSetSchurIS(Mat mat,IS is)
9287 {
9288   PetscErrorCode (*f)(Mat,IS);
9289 
9290   PetscFunctionBegin;
9291   PetscValidType(mat,1);
9292   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
9293   PetscValidType(is,2);
9294   PetscValidHeaderSpecific(is,IS_CLASSID,2);
9295   PetscCheckSameComm(mat,1,is,2);
9296   PetscCheck(mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Only for factored matrix");
9297   PetscCall(PetscObjectQueryFunction((PetscObject)mat,"MatFactorSetSchurIS_C",&f));
9298   PetscCheck(f,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"The selected MatSolverType does not support Schur complement computation. You should use MATSOLVERMUMPS or MATSOLVERMKL_PARDISO");
9299   PetscCall(MatDestroy(&mat->schur));
9300   PetscCall((*f)(mat,is));
9301   PetscCheck(mat->schur,PetscObjectComm((PetscObject)mat),PETSC_ERR_PLIB,"Schur complement has not been created");
9302   PetscFunctionReturn(0);
9303 }
9304 
9305 /*@
9306   MatFactorCreateSchurComplement - Create a Schur complement matrix object using Schur data computed during the factorization step
9307 
9308    Logically Collective on Mat
9309 
9310    Input Parameters:
9311 +  F - the factored matrix obtained by calling MatGetFactor() from PETSc-MUMPS interface
9312 .  S - location where to return the Schur complement, can be NULL
9313 -  status - the status of the Schur complement matrix, can be NULL
9314 
9315    Notes:
9316    You must call MatFactorSetSchurIS() before calling this routine.
9317 
9318    The routine provides a copy of the Schur matrix stored within the solver data structures.
9319    The caller must destroy the object when it is no longer needed.
9320    If MatFactorInvertSchurComplement() has been called, the routine gets back the inverse.
9321 
9322    Use MatFactorGetSchurComplement() to get access to the Schur complement matrix inside the factored matrix instead of making a copy of it (which this function does)
9323 
9324    Developer Notes:
9325     The reason this routine exists is because the representation of the Schur complement within the factor matrix may be different than a standard PETSc
9326    matrix representation and we normally do not want to use the time or memory to make a copy as a regular PETSc matrix.
9327 
9328    See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.
9329 
9330    Level: advanced
9331 
9332    References:
9333 
9334 .seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorSchurStatus`
9335 @*/
9336 PetscErrorCode MatFactorCreateSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
9337 {
9338   PetscFunctionBegin;
9339   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9340   if (S) PetscValidPointer(S,2);
9341   if (status) PetscValidPointer(status,3);
9342   if (S) {
9343     PetscErrorCode (*f)(Mat,Mat*);
9344 
9345     PetscCall(PetscObjectQueryFunction((PetscObject)F,"MatFactorCreateSchurComplement_C",&f));
9346     if (f) {
9347       PetscCall((*f)(F,S));
9348     } else {
9349       PetscCall(MatDuplicate(F->schur,MAT_COPY_VALUES,S));
9350     }
9351   }
9352   if (status) *status = F->schur_status;
9353   PetscFunctionReturn(0);
9354 }
9355 
/*@
  MatFactorGetSchurComplement - Gets access to a Schur complement matrix using the current Schur data within a factored matrix

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor()
.  *S - location where to return the Schur complement, can be NULL
-  status - the status of the Schur complement matrix, can be NULL

   Notes:
   You must call MatFactorSetSchurIS() before calling this routine.

   Schur complement mode is currently implemented for sequential matrices.
   The routine returns the Schur Complement stored within the data structures of the solver.
   If MatFactorInvertSchurComplement() has previously been called, the returned matrix is actually the inverse of the Schur complement.
   The returned matrix should not be destroyed; the caller should call MatFactorRestoreSchurComplement() when the object is no longer needed.

   Use MatFactorCreateSchurComplement() to create a copy of the Schur complement matrix that is within a factored matrix

   See MatCreateSchurComplement() or MatGetSchurComplement() for ways to create virtual or approximate Schur complements.

   Level: advanced

   References:

.seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
@*/
PetscErrorCode MatFactorGetSchurComplement(Mat F,Mat* S,MatFactorSchurStatus* status)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  if (S) PetscValidPointer(S,2);
  if (status) PetscValidPointer(status,3);
  /* hand out direct references into the factor; no copy is made, so the caller
     must pair this with MatFactorRestoreSchurComplement() and must not destroy *S */
  if (S) *S = F->schur;
  if (status) *status = F->schur_status;
  PetscFunctionReturn(0);
}
9394 
/*@
  MatFactorRestoreSchurComplement - Restore the Schur complement matrix object obtained from a call to MatFactorGetSchurComplement

   Logically Collective on Mat

   Input Parameters:
+  F - the factored matrix obtained by calling MatGetFactor()
.  *S - location where the Schur complement is stored
-  status - the status of the Schur complement matrix (see MatFactorSchurStatus)

   Notes:

   Level: advanced

   References:

.seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorRestoreSchurComplement()`, `MatFactorCreateSchurComplement()`, `MatFactorSchurStatus`
@*/
PetscErrorCode MatFactorRestoreSchurComplement(Mat F,Mat* S,MatFactorSchurStatus status)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(F,MAT_CLASSID,1);
  if (S) {
    PetscValidHeaderSpecific(*S,MAT_CLASSID,2);
    /* the caller's reference is revoked; the Schur matrix remains owned by F */
    *S = NULL;
  }
  /* record the status the caller observed (e.g. factored/inverted) and let the
     factor implementation react to the status change */
  F->schur_status = status;
  PetscCall(MatFactorUpdateSchurStatus_Private(F));
  PetscFunctionReturn(0);
}
9425 
9426 /*@
9427   MatFactorSolveSchurComplementTranspose - Solve the transpose of the Schur complement system computed during the factorization step
9428 
9429    Logically Collective on Mat
9430 
9431    Input Parameters:
9432 +  F - the factored matrix obtained by calling MatGetFactor()
9433 .  rhs - location where the right hand side of the Schur complement system is stored
9434 -  sol - location where the solution of the Schur complement system has to be returned
9435 
9436    Notes:
9437    The sizes of the vectors should match the size of the Schur complement
9438 
9439    Must be called after MatFactorSetSchurIS()
9440 
9441    Level: advanced
9442 
9443    References:
9444 
9445 .seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplement()`
9446 @*/
9447 PetscErrorCode MatFactorSolveSchurComplementTranspose(Mat F, Vec rhs, Vec sol)
9448 {
9449   PetscFunctionBegin;
9450   PetscValidType(F,1);
9451   PetscValidType(rhs,2);
9452   PetscValidType(sol,3);
9453   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9454   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
9455   PetscValidHeaderSpecific(sol,VEC_CLASSID,3);
9456   PetscCheckSameComm(F,1,rhs,2);
9457   PetscCheckSameComm(F,1,sol,3);
9458   PetscCall(MatFactorFactorizeSchurComplement(F));
9459   switch (F->schur_status) {
9460   case MAT_FACTOR_SCHUR_FACTORED:
9461     PetscCall(MatSolveTranspose(F->schur,rhs,sol));
9462     break;
9463   case MAT_FACTOR_SCHUR_INVERTED:
9464     PetscCall(MatMultTranspose(F->schur,rhs,sol));
9465     break;
9466   default:
9467     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %d",F->schur_status);
9468   }
9469   PetscFunctionReturn(0);
9470 }
9471 
9472 /*@
9473   MatFactorSolveSchurComplement - Solve the Schur complement system computed during the factorization step
9474 
9475    Logically Collective on Mat
9476 
9477    Input Parameters:
9478 +  F - the factored matrix obtained by calling MatGetFactor()
9479 .  rhs - location where the right hand side of the Schur complement system is stored
9480 -  sol - location where the solution of the Schur complement system has to be returned
9481 
9482    Notes:
9483    The sizes of the vectors should match the size of the Schur complement
9484 
9485    Must be called after MatFactorSetSchurIS()
9486 
9487    Level: advanced
9488 
9489    References:
9490 
9491 .seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorSolveSchurComplementTranspose()`
9492 @*/
9493 PetscErrorCode MatFactorSolveSchurComplement(Mat F, Vec rhs, Vec sol)
9494 {
9495   PetscFunctionBegin;
9496   PetscValidType(F,1);
9497   PetscValidType(rhs,2);
9498   PetscValidType(sol,3);
9499   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9500   PetscValidHeaderSpecific(rhs,VEC_CLASSID,2);
9501   PetscValidHeaderSpecific(sol,VEC_CLASSID,3);
9502   PetscCheckSameComm(F,1,rhs,2);
9503   PetscCheckSameComm(F,1,sol,3);
9504   PetscCall(MatFactorFactorizeSchurComplement(F));
9505   switch (F->schur_status) {
9506   case MAT_FACTOR_SCHUR_FACTORED:
9507     PetscCall(MatSolve(F->schur,rhs,sol));
9508     break;
9509   case MAT_FACTOR_SCHUR_INVERTED:
9510     PetscCall(MatMult(F->schur,rhs,sol));
9511     break;
9512   default:
9513     SETERRQ(PetscObjectComm((PetscObject)F),PETSC_ERR_SUP,"Unhandled MatFactorSchurStatus %d",F->schur_status);
9514   }
9515   PetscFunctionReturn(0);
9516 }
9517 
9518 /*@
9519   MatFactorInvertSchurComplement - Invert the Schur complement matrix computed during the factorization step
9520 
9521    Logically Collective on Mat
9522 
9523    Input Parameters:
9524 .  F - the factored matrix obtained by calling MatGetFactor()
9525 
9526    Notes:
9527     Must be called after MatFactorSetSchurIS().
9528 
9529    Call MatFactorGetSchurComplement() or  MatFactorCreateSchurComplement() AFTER this call to actually compute the inverse and get access to it.
9530 
9531    Level: advanced
9532 
9533    References:
9534 
9535 .seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorGetSchurComplement()`, `MatFactorCreateSchurComplement()`
9536 @*/
9537 PetscErrorCode MatFactorInvertSchurComplement(Mat F)
9538 {
9539   PetscFunctionBegin;
9540   PetscValidType(F,1);
9541   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9542   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED) PetscFunctionReturn(0);
9543   PetscCall(MatFactorFactorizeSchurComplement(F));
9544   PetscCall(MatFactorInvertSchurComplement_Private(F));
9545   F->schur_status = MAT_FACTOR_SCHUR_INVERTED;
9546   PetscFunctionReturn(0);
9547 }
9548 
9549 /*@
9550   MatFactorFactorizeSchurComplement - Factorize the Schur complement matrix computed during the factorization step
9551 
9552    Logically Collective on Mat
9553 
9554    Input Parameters:
9555 .  F - the factored matrix obtained by calling MatGetFactor()
9556 
9557    Notes:
9558     Must be called after MatFactorSetSchurIS().
9559 
9560    Level: advanced
9561 
9562    References:
9563 
9564 .seealso: `MatGetFactor()`, `MatFactorSetSchurIS()`, `MatFactorInvertSchurComplement()`
9565 @*/
9566 PetscErrorCode MatFactorFactorizeSchurComplement(Mat F)
9567 {
9568   PetscFunctionBegin;
9569   PetscValidType(F,1);
9570   PetscValidHeaderSpecific(F,MAT_CLASSID,1);
9571   if (F->schur_status == MAT_FACTOR_SCHUR_INVERTED || F->schur_status == MAT_FACTOR_SCHUR_FACTORED) PetscFunctionReturn(0);
9572   PetscCall(MatFactorFactorizeSchurComplement_Private(F));
9573   F->schur_status = MAT_FACTOR_SCHUR_FACTORED;
9574   PetscFunctionReturn(0);
9575 }
9576 
9577 /*@
9578    MatPtAP - Creates the matrix product C = P^T * A * P
9579 
9580    Neighbor-wise Collective on Mat
9581 
9582    Input Parameters:
9583 +  A - the matrix
9584 .  P - the projection matrix
9585 .  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9586 -  fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(P)), use PETSC_DEFAULT if you do not have a good estimate
9587           if the result is a dense matrix this is irrelevant
9588 
9589    Output Parameters:
9590 .  C - the product matrix
9591 
9592    Notes:
9593    C will be created and must be destroyed by the user with MatDestroy().
9594 
9595    For matrix types without special implementation the function fallbacks to MatMatMult() followed by MatTransposeMatMult().
9596 
9597    Level: intermediate
9598 
9599 .seealso: `MatMatMult()`, `MatRARt()`
9600 @*/
9601 PetscErrorCode MatPtAP(Mat A,Mat P,MatReuse scall,PetscReal fill,Mat *C)
9602 {
9603   PetscFunctionBegin;
9604   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C,5);
9605   PetscCheck(scall != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9606 
9607   if (scall == MAT_INITIAL_MATRIX) {
9608     PetscCall(MatProductCreate(A,P,NULL,C));
9609     PetscCall(MatProductSetType(*C,MATPRODUCT_PtAP));
9610     PetscCall(MatProductSetAlgorithm(*C,"default"));
9611     PetscCall(MatProductSetFill(*C,fill));
9612 
9613     (*C)->product->api_user = PETSC_TRUE;
9614     PetscCall(MatProductSetFromOptions(*C));
9615     PetscCheck((*C)->ops->productsymbolic,PetscObjectComm((PetscObject)(*C)),PETSC_ERR_SUP,"MatProduct %s not supported for A %s and P %s",MatProductTypes[MATPRODUCT_PtAP],((PetscObject)A)->type_name,((PetscObject)P)->type_name);
9616     PetscCall(MatProductSymbolic(*C));
9617   } else { /* scall == MAT_REUSE_MATRIX */
9618     PetscCall(MatProductReplaceMats(A,P,NULL,*C));
9619   }
9620 
9621   PetscCall(MatProductNumeric(*C));
9622   if (A->symmetric) {
9623     if (A->spd) {
9624       PetscCall(MatSetOption(*C,MAT_SPD,PETSC_TRUE));
9625     } else {
9626       PetscCall(MatSetOption(*C,MAT_SYMMETRIC,PETSC_TRUE));
9627     }
9628   }
9629   PetscFunctionReturn(0);
9630 }
9631 
9632 /*@
9633    MatRARt - Creates the matrix product C = R * A * R^T
9634 
9635    Neighbor-wise Collective on Mat
9636 
9637    Input Parameters:
9638 +  A - the matrix
9639 .  R - the projection matrix
9640 .  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9641 -  fill - expected fill as ratio of nnz(C)/nnz(A), use PETSC_DEFAULT if you do not have a good estimate
9642           if the result is a dense matrix this is irrelevant
9643 
9644    Output Parameters:
9645 .  C - the product matrix
9646 
9647    Notes:
9648    C will be created and must be destroyed by the user with MatDestroy().
9649 
9650    This routine is currently only implemented for pairs of AIJ matrices and classes
9651    which inherit from AIJ. Due to PETSc sparse matrix block row distribution among processes,
9652    parallel MatRARt is implemented via explicit transpose of R, which could be very expensive.
9653    We recommend using MatPtAP().
9654 
9655    Level: intermediate
9656 
9657 .seealso: `MatMatMult()`, `MatPtAP()`
9658 @*/
9659 PetscErrorCode MatRARt(Mat A,Mat R,MatReuse scall,PetscReal fill,Mat *C)
9660 {
9661   PetscFunctionBegin;
9662   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*C,5);
9663   PetscCheck(scall != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9664 
9665   if (scall == MAT_INITIAL_MATRIX) {
9666     PetscCall(MatProductCreate(A,R,NULL,C));
9667     PetscCall(MatProductSetType(*C,MATPRODUCT_RARt));
9668     PetscCall(MatProductSetAlgorithm(*C,"default"));
9669     PetscCall(MatProductSetFill(*C,fill));
9670 
9671     (*C)->product->api_user = PETSC_TRUE;
9672     PetscCall(MatProductSetFromOptions(*C));
9673     PetscCheck((*C)->ops->productsymbolic,PetscObjectComm((PetscObject)(*C)),PETSC_ERR_SUP,"MatProduct %s not supported for A %s and R %s",MatProductTypes[MATPRODUCT_RARt],((PetscObject)A)->type_name,((PetscObject)R)->type_name);
9674     PetscCall(MatProductSymbolic(*C));
9675   } else { /* scall == MAT_REUSE_MATRIX */
9676     PetscCall(MatProductReplaceMats(A,R,NULL,*C));
9677   }
9678 
9679   PetscCall(MatProductNumeric(*C));
9680   if (A->symmetric_set && A->symmetric) {
9681     PetscCall(MatSetOption(*C,MAT_SYMMETRIC,PETSC_TRUE));
9682   }
9683   PetscFunctionReturn(0);
9684 }
9685 
/* Shared driver behind MatMatMult(), MatMatTransposeMult(), etc.: creates (or reuses)
   the MatProduct object of the requested type on *C, runs the symbolic phase when
   needed, and always finishes with the numeric phase.

   For MAT_REUSE_MATRIX there are three cases:
     - *C already carries a matching product: only the operand matrices are replaced;
     - *C is a user-provided dense matrix with no (or a mismatching) product attached:
       a product is created on it in place;
     - anything else is an error (the caller must use MatProductCreate() first). */
static PetscErrorCode MatProduct_Private(Mat A,Mat B,MatReuse scall,PetscReal fill,MatProductType ptype, Mat *C)
{
  PetscFunctionBegin;
  PetscCheck(scall != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");

  if (scall == MAT_INITIAL_MATRIX) {
    PetscCall(PetscInfo(A,"Calling MatProduct API with MAT_INITIAL_MATRIX and product type %s\n",MatProductTypes[ptype]));
    PetscCall(MatProductCreate(A,B,NULL,C));
    PetscCall(MatProductSetType(*C,ptype));
    PetscCall(MatProductSetAlgorithm(*C,MATPRODUCTALGORITHMDEFAULT));
    PetscCall(MatProductSetFill(*C,fill));

    /* mark the product as created through the user-facing Mat*Mult() API */
    (*C)->product->api_user = PETSC_TRUE;
    PetscCall(MatProductSetFromOptions(*C));
    PetscCall(MatProductSymbolic(*C));
  } else { /* scall == MAT_REUSE_MATRIX */
    Mat_Product *product = (*C)->product;
    PetscBool isdense;

    PetscCall(PetscObjectBaseTypeCompareAny((PetscObject)(*C),&isdense,MATSEQDENSE,MATMPIDENSE,""));
    /* a dense *C may carry a product of a different type from an earlier call;
       clear it so it can be rebuilt below with the requested type */
    if (isdense && product && product->type != ptype) {
      PetscCall(MatProductClear(*C));
      product = NULL;
    }
    PetscCall(PetscInfo(A,"Calling MatProduct API with MAT_REUSE_MATRIX %s product present and product type %s\n",product ? "with" : "without",MatProductTypes[ptype]));
    if (!product) { /* user provide the dense matrix *C without calling MatProductCreate() or reusing it from previous calls */
      if (isdense) {
        PetscCall(MatProductCreate_Private(A,B,NULL,*C));
        product = (*C)->product;
        product->fill     = fill;
        product->api_user = PETSC_TRUE;
        /* clear flag: the product was attached to a pre-existing matrix and must be
           removed again when the product data is destroyed */
        product->clear    = PETSC_TRUE;

        PetscCall(MatProductSetType(*C,ptype));
        PetscCall(MatProductSetFromOptions(*C));
        PetscCheck((*C)->ops->productsymbolic,PetscObjectComm((PetscObject)(*C)),PETSC_ERR_SUP,"MatProduct %s not supported for %s and %s",MatProductTypes[ptype],((PetscObject)A)->type_name,((PetscObject)B)->type_name);
        PetscCall(MatProductSymbolic(*C));
      } else SETERRQ(PetscObjectComm((PetscObject)(*C)),PETSC_ERR_SUP,"Call MatProductCreate() first");
    } else { /* user may change input matrices A or B when REUSE */
      PetscCall(MatProductReplaceMats(A,B,NULL,*C));
    }
  }
  /* both paths converge here: (re)compute the numeric values of the product */
  PetscCall(MatProductNumeric(*C));
  PetscFunctionReturn(0);
}
9731 
/*@
   MatMatMult - Performs Matrix-Matrix Multiplication C=A*B.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  A - the left matrix
.  B - the right matrix
.  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-  fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if you do not have a good estimate
          if the result is a dense matrix this is irrelevant

   Output Parameters:
.  C - the product matrix

   Notes:
   Unless scall is MAT_REUSE_MATRIX C will be created.

   MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call and C was obtained from a previous
   call to this function with MAT_INITIAL_MATRIX.

   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value actually needed.

   If you have many matrices with the same non-zero structure to multiply, you should use MatProductCreate()/MatProductSymbolic()/MatProductReplaceMats(), and call MatProductNumeric() repeatedly.

   In the special case where matrix B (and hence C) are dense you can create the correctly sized matrix C yourself and then call this routine with MAT_REUSE_MATRIX, rather than first having MatMatMult() create it for you. You can NEVER do this if the matrix C is sparse.

   Example of Usage:
.vb
     MatProductCreate(A,B,NULL,&C);
     MatProductSetType(C,MATPRODUCT_AB);
     MatProductSymbolic(C);
     MatProductNumeric(C); // compute C=A * B
     MatProductReplaceMats(A1,B1,NULL,C); // compute C=A1 * B1
     MatProductNumeric(C);
     MatProductReplaceMats(A2,NULL,NULL,C); // compute C=A2 * B1
     MatProductNumeric(C);
.ve

   Level: intermediate

.seealso: `MatTransposeMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`, `MatProductCreate()`, `MatProductSymbolic()`, `MatProductReplaceMats()`, `MatProductNumeric()`
@*/
PetscErrorCode MatMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscFunctionBegin;
  /* thin wrapper: the shared driver handles both MAT_INITIAL_MATRIX and MAT_REUSE_MATRIX */
  PetscCall(MatProduct_Private(A,B,scall,fill,MATPRODUCT_AB,C));
  PetscFunctionReturn(0);
}
9781 
/*@
   MatMatTransposeMult - Performs Matrix-Matrix Multiplication C=A*B^T.

   Neighbor-wise Collective on Mat

   Input Parameters:
+  A - the left matrix
.  B - the right matrix
.  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-  fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known

   Output Parameters:
.  C - the product matrix

   Notes:
   C will be created if MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().

   MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call

  To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
   actually needed.

   This routine is currently only implemented for pairs of SeqAIJ matrices, for the SeqDense class,
   and for pairs of MPIDense matrices.

   Options Database Keys:
.  -matmattransmult_mpidense_mpidense_via {allgatherv,cyclic} - Choose between algorithms for MPIDense matrices: the
              first redundantly copies the transposed B matrix on each process and requires O(log P) communication complexity;
              the second never stores more than one portion of the B matrix at a time but requires O(P) communication complexity.

   Level: intermediate

.seealso: `MatMatMult()`, `MatTransposeMatMult()` `MatPtAP()`
@*/
PetscErrorCode MatMatTransposeMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
{
  PetscFunctionBegin;
  PetscCall(MatProduct_Private(A,B,scall,fill,MATPRODUCT_ABt,C));
  /* A*A^T is symmetric by construction, so record the flag on the result */
  if (A == B) {
    PetscCall(MatSetOption(*C,MAT_SYMMETRIC,PETSC_TRUE));
  }
  PetscFunctionReturn(0);
}
9825 
9826 /*@
9827    MatTransposeMatMult - Performs Matrix-Matrix Multiplication C=A^T*B.
9828 
9829    Neighbor-wise Collective on Mat
9830 
9831    Input Parameters:
9832 +  A - the left matrix
9833 .  B - the right matrix
9834 .  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9835 -  fill - expected fill as ratio of nnz(C)/(nnz(A) + nnz(B)), use PETSC_DEFAULT if not known
9836 
9837    Output Parameters:
9838 .  C - the product matrix
9839 
9840    Notes:
9841    C will be created if MAT_INITIAL_MATRIX and must be destroyed by the user with MatDestroy().
9842 
9843    MAT_REUSE_MATRIX can only be used if the matrices A and B have the same nonzero pattern as in the previous call.
9844 
9845   To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9846    actually needed.
9847 
9848    This routine is currently implemented for pairs of AIJ matrices and pairs of SeqDense matrices and classes
9849    which inherit from SeqAIJ.  C will be of the same type as the input matrices.
9850 
9851    Level: intermediate
9852 
9853 .seealso: `MatMatMult()`, `MatMatTransposeMult()`, `MatPtAP()`
9854 @*/
9855 PetscErrorCode MatTransposeMatMult(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C)
9856 {
9857   PetscFunctionBegin;
9858   PetscCall(MatProduct_Private(A,B,scall,fill,MATPRODUCT_AtB,C));
9859   PetscFunctionReturn(0);
9860 }
9861 
9862 /*@
9863    MatMatMatMult - Performs Matrix-Matrix-Matrix Multiplication D=A*B*C.
9864 
9865    Neighbor-wise Collective on Mat
9866 
9867    Input Parameters:
9868 +  A - the left matrix
9869 .  B - the middle matrix
9870 .  C - the right matrix
9871 .  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9872 -  fill - expected fill as ratio of nnz(D)/(nnz(A) + nnz(B)+nnz(C)), use PETSC_DEFAULT if you do not have a good estimate
9873           if the result is a dense matrix this is irrelevant
9874 
9875    Output Parameters:
9876 .  D - the product matrix
9877 
9878    Notes:
9879    Unless scall is MAT_REUSE_MATRIX D will be created.
9880 
9881    MAT_REUSE_MATRIX can only be used if the matrices A, B and C have the same nonzero pattern as in the previous call
9882 
9883    To determine the correct fill value, run with -info and search for the string "Fill ratio" to see the value
9884    actually needed.
9885 
9886    If you have many matrices with the same non-zero structure to multiply, you
9887    should use MAT_REUSE_MATRIX in all calls but the first
9888 
9889    Level: intermediate
9890 
9891 .seealso: `MatMatMult`, `MatPtAP()`, `MatMatTransposeMult()`, `MatTransposeMatMult()`
9892 @*/
9893 PetscErrorCode MatMatMatMult(Mat A,Mat B,Mat C,MatReuse scall,PetscReal fill,Mat *D)
9894 {
9895   PetscFunctionBegin;
9896   if (scall == MAT_REUSE_MATRIX) MatCheckProduct(*D,6);
9897   PetscCheck(scall != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
9898 
9899   if (scall == MAT_INITIAL_MATRIX) {
9900     PetscCall(MatProductCreate(A,B,C,D));
9901     PetscCall(MatProductSetType(*D,MATPRODUCT_ABC));
9902     PetscCall(MatProductSetAlgorithm(*D,"default"));
9903     PetscCall(MatProductSetFill(*D,fill));
9904 
9905     (*D)->product->api_user = PETSC_TRUE;
9906     PetscCall(MatProductSetFromOptions(*D));
9907     PetscCheck((*D)->ops->productsymbolic,PetscObjectComm((PetscObject)(*D)),PETSC_ERR_SUP,"MatProduct %s not supported for A %s, B %s and C %s",MatProductTypes[MATPRODUCT_ABC],((PetscObject)A)->type_name,((PetscObject)B)->type_name,((PetscObject)C)->type_name);
9908     PetscCall(MatProductSymbolic(*D));
9909   } else { /* user may change input matrices when REUSE */
9910     PetscCall(MatProductReplaceMats(A,B,C,*D));
9911   }
9912   PetscCall(MatProductNumeric(*D));
9913   PetscFunctionReturn(0);
9914 }
9915 
9916 /*@
9917    MatCreateRedundantMatrix - Create redundant matrices and put them into processors of subcommunicators.
9918 
9919    Collective on Mat
9920 
9921    Input Parameters:
9922 +  mat - the matrix
9923 .  nsubcomm - the number of subcommunicators (= number of redundant parallel or sequential matrices)
9924 .  subcomm - MPI communicator split from the communicator where mat resides in (or MPI_COMM_NULL if nsubcomm is used)
9925 -  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
9926 
9927    Output Parameter:
9928 .  matredundant - redundant matrix
9929 
9930    Notes:
9931    MAT_REUSE_MATRIX can only be used when the nonzero structure of the
9932    original matrix has not changed from that last call to MatCreateRedundantMatrix().
9933 
9934    This routine creates the duplicated matrices in the subcommunicators; you should NOT create them before
9935    calling it.
9936 
9937    Level: advanced
9938 
9939 .seealso: `MatDestroy()`
9940 @*/
9941 PetscErrorCode MatCreateRedundantMatrix(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,MatReuse reuse,Mat *matredundant)
9942 {
9943   MPI_Comm       comm;
9944   PetscMPIInt    size;
9945   PetscInt       mloc_sub,nloc_sub,rstart,rend,M=mat->rmap->N,N=mat->cmap->N,bs=mat->rmap->bs;
9946   Mat_Redundant  *redund=NULL;
9947   PetscSubcomm   psubcomm=NULL;
9948   MPI_Comm       subcomm_in=subcomm;
9949   Mat            *matseq;
9950   IS             isrow,iscol;
9951   PetscBool      newsubcomm=PETSC_FALSE;
9952 
9953   PetscFunctionBegin;
9954   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
9955   if (nsubcomm && reuse == MAT_REUSE_MATRIX) {
9956     PetscValidPointer(*matredundant,5);
9957     PetscValidHeaderSpecific(*matredundant,MAT_CLASSID,5);
9958   }
9959 
9960   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
9961   if (size == 1 || nsubcomm == 1) {
9962     if (reuse == MAT_INITIAL_MATRIX) {
9963       PetscCall(MatDuplicate(mat,MAT_COPY_VALUES,matredundant));
9964     } else {
9965       PetscCheck(*matredundant != mat,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
9966       PetscCall(MatCopy(mat,*matredundant,SAME_NONZERO_PATTERN));
9967     }
9968     PetscFunctionReturn(0);
9969   }
9970 
9971   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
9972   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
9973   MatCheckPreallocated(mat,1);
9974 
9975   PetscCall(PetscLogEventBegin(MAT_RedundantMat,mat,0,0,0));
9976   if (subcomm_in == MPI_COMM_NULL && reuse == MAT_INITIAL_MATRIX) { /* get subcomm if user does not provide subcomm */
9977     /* create psubcomm, then get subcomm */
9978     PetscCall(PetscObjectGetComm((PetscObject)mat,&comm));
9979     PetscCallMPI(MPI_Comm_size(comm,&size));
9980     PetscCheck(nsubcomm >= 1 && nsubcomm <= size,PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"nsubcomm must between 1 and %d",size);
9981 
9982     PetscCall(PetscSubcommCreate(comm,&psubcomm));
9983     PetscCall(PetscSubcommSetNumber(psubcomm,nsubcomm));
9984     PetscCall(PetscSubcommSetType(psubcomm,PETSC_SUBCOMM_CONTIGUOUS));
9985     PetscCall(PetscSubcommSetFromOptions(psubcomm));
9986     PetscCall(PetscCommDuplicate(PetscSubcommChild(psubcomm),&subcomm,NULL));
9987     newsubcomm = PETSC_TRUE;
9988     PetscCall(PetscSubcommDestroy(&psubcomm));
9989   }
9990 
9991   /* get isrow, iscol and a local sequential matrix matseq[0] */
9992   if (reuse == MAT_INITIAL_MATRIX) {
9993     mloc_sub = PETSC_DECIDE;
9994     nloc_sub = PETSC_DECIDE;
9995     if (bs < 1) {
9996       PetscCall(PetscSplitOwnership(subcomm,&mloc_sub,&M));
9997       PetscCall(PetscSplitOwnership(subcomm,&nloc_sub,&N));
9998     } else {
9999       PetscCall(PetscSplitOwnershipBlock(subcomm,bs,&mloc_sub,&M));
10000       PetscCall(PetscSplitOwnershipBlock(subcomm,bs,&nloc_sub,&N));
10001     }
10002     PetscCallMPI(MPI_Scan(&mloc_sub,&rend,1,MPIU_INT,MPI_SUM,subcomm));
10003     rstart = rend - mloc_sub;
10004     PetscCall(ISCreateStride(PETSC_COMM_SELF,mloc_sub,rstart,1,&isrow));
10005     PetscCall(ISCreateStride(PETSC_COMM_SELF,N,0,1,&iscol));
10006   } else { /* reuse == MAT_REUSE_MATRIX */
10007     PetscCheck(*matredundant != mat,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10008     /* retrieve subcomm */
10009     PetscCall(PetscObjectGetComm((PetscObject)(*matredundant),&subcomm));
10010     redund = (*matredundant)->redundant;
10011     isrow  = redund->isrow;
10012     iscol  = redund->iscol;
10013     matseq = redund->matseq;
10014   }
10015   PetscCall(MatCreateSubMatrices(mat,1,&isrow,&iscol,reuse,&matseq));
10016 
10017   /* get matredundant over subcomm */
10018   if (reuse == MAT_INITIAL_MATRIX) {
10019     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],nloc_sub,reuse,matredundant));
10020 
10021     /* create a supporting struct and attach it to C for reuse */
10022     PetscCall(PetscNewLog(*matredundant,&redund));
10023     (*matredundant)->redundant = redund;
10024     redund->isrow              = isrow;
10025     redund->iscol              = iscol;
10026     redund->matseq             = matseq;
10027     if (newsubcomm) {
10028       redund->subcomm          = subcomm;
10029     } else {
10030       redund->subcomm          = MPI_COMM_NULL;
10031     }
10032   } else {
10033     PetscCall(MatCreateMPIMatConcatenateSeqMat(subcomm,matseq[0],PETSC_DECIDE,reuse,matredundant));
10034   }
10035 #if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
10036   if (matseq[0]->boundtocpu && matseq[0]->bindingpropagates) {
10037     PetscCall(MatBindToCPU(*matredundant,PETSC_TRUE));
10038     PetscCall(MatSetBindingPropagates(*matredundant,PETSC_TRUE));
10039   }
10040 #endif
10041   PetscCall(PetscLogEventEnd(MAT_RedundantMat,mat,0,0,0));
10042   PetscFunctionReturn(0);
10043 }
10044 
10045 /*@C
10046    MatGetMultiProcBlock - Create multiple [bjacobi] 'parallel submatrices' from
10047    a given 'mat' object. Each submatrix can span multiple procs.
10048 
10049    Collective on Mat
10050 
10051    Input Parameters:
10052 +  mat - the matrix
10053 .  subcomm - the subcommunicator obtained by com_split(comm)
10054 -  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10055 
10056    Output Parameter:
10057 .  subMat - 'parallel submatrices each spans a given subcomm
10058 
10059   Notes:
10060   The submatrix partition across processors is dictated by 'subComm' a
10061   communicator obtained by MPI_comm_split(). The subComm
10062   is not restriced to be grouped with consecutive original ranks.
10063 
10064   Due the MPI_Comm_split() usage, the parallel layout of the submatrices
10065   map directly to the layout of the original matrix [wrt the local
10066   row,col partitioning]. So the original 'DiagonalMat' naturally maps
10067   into the 'DiagonalMat' of the subMat, hence it is used directly from
10068   the subMat. However the offDiagMat looses some columns - and this is
10069   reconstructed with MatSetValues()
10070 
10071   Level: advanced
10072 
10073 .seealso: `MatCreateSubMatrices()`
10074 @*/
10075 PetscErrorCode   MatGetMultiProcBlock(Mat mat, MPI_Comm subComm, MatReuse scall,Mat *subMat)
10076 {
10077   PetscMPIInt    commsize,subCommSize;
10078 
10079   PetscFunctionBegin;
10080   PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&commsize));
10081   PetscCallMPI(MPI_Comm_size(subComm,&subCommSize));
10082   PetscCheck(subCommSize <= commsize,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_OUTOFRANGE,"CommSize %d < SubCommZize %d",commsize,subCommSize);
10083 
10084   PetscCheck(scall != MAT_REUSE_MATRIX || *subMat != mat,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10085   PetscCall(PetscLogEventBegin(MAT_GetMultiProcBlock,mat,0,0,0));
10086   PetscCall((*mat->ops->getmultiprocblock)(mat,subComm,scall,subMat));
10087   PetscCall(PetscLogEventEnd(MAT_GetMultiProcBlock,mat,0,0,0));
10088   PetscFunctionReturn(0);
10089 }
10090 
10091 /*@
10092    MatGetLocalSubMatrix - Gets a reference to a submatrix specified in local numbering
10093 
10094    Not Collective
10095 
10096    Input Parameters:
10097 +  mat - matrix to extract local submatrix from
10098 .  isrow - local row indices for submatrix
10099 -  iscol - local column indices for submatrix
10100 
10101    Output Parameter:
10102 .  submat - the submatrix
10103 
10104    Level: intermediate
10105 
10106    Notes:
10107    The submat should be returned with MatRestoreLocalSubMatrix().
10108 
10109    Depending on the format of mat, the returned submat may not implement MatMult().  Its communicator may be
10110    the same as mat, it may be PETSC_COMM_SELF, or some other subcomm of mat's.
10111 
10112    The submat always implements MatSetValuesLocal().  If isrow and iscol have the same block size, then
10113    MatSetValuesBlockedLocal() will also be implemented.
10114 
10115    The mat must have had a ISLocalToGlobalMapping provided to it with MatSetLocalToGlobalMapping(). Note that
10116    matrices obtained with DMCreateMatrix() generally already have the local to global mapping provided.
10117 
10118 .seealso: `MatRestoreLocalSubMatrix()`, `MatCreateLocalRef()`, `MatSetLocalToGlobalMapping()`
10119 @*/
10120 PetscErrorCode MatGetLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10121 {
10122   PetscFunctionBegin;
10123   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10124   PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
10125   PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
10126   PetscCheckSameComm(isrow,2,iscol,3);
10127   PetscValidPointer(submat,4);
10128   PetscCheck(mat->rmap->mapping,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Matrix must have local to global mapping provided before this call");
10129 
10130   if (mat->ops->getlocalsubmatrix) {
10131     PetscCall((*mat->ops->getlocalsubmatrix)(mat,isrow,iscol,submat));
10132   } else {
10133     PetscCall(MatCreateLocalRef(mat,isrow,iscol,submat));
10134   }
10135   PetscFunctionReturn(0);
10136 }
10137 
10138 /*@
10139    MatRestoreLocalSubMatrix - Restores a reference to a submatrix specified in local numbering
10140 
10141    Not Collective
10142 
10143    Input Parameters:
10144 +  mat - matrix to extract local submatrix from
10145 .  isrow - local row indices for submatrix
10146 .  iscol - local column indices for submatrix
10147 -  submat - the submatrix
10148 
10149    Level: intermediate
10150 
10151 .seealso: `MatGetLocalSubMatrix()`
10152 @*/
10153 PetscErrorCode MatRestoreLocalSubMatrix(Mat mat,IS isrow,IS iscol,Mat *submat)
10154 {
10155   PetscFunctionBegin;
10156   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10157   PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
10158   PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
10159   PetscCheckSameComm(isrow,2,iscol,3);
10160   PetscValidPointer(submat,4);
10161   if (*submat) {
10162     PetscValidHeaderSpecific(*submat,MAT_CLASSID,4);
10163   }
10164 
10165   if (mat->ops->restorelocalsubmatrix) {
10166     PetscCall((*mat->ops->restorelocalsubmatrix)(mat,isrow,iscol,submat));
10167   } else {
10168     PetscCall(MatDestroy(submat));
10169   }
10170   *submat = NULL;
10171   PetscFunctionReturn(0);
10172 }
10173 
10174 /* --------------------------------------------------------*/
10175 /*@
10176    MatFindZeroDiagonals - Finds all the rows of a matrix that have zero or no diagonal entry in the matrix
10177 
10178    Collective on Mat
10179 
10180    Input Parameter:
10181 .  mat - the matrix
10182 
10183    Output Parameter:
10184 .  is - if any rows have zero diagonals this contains the list of them
10185 
10186    Level: developer
10187 
10188 .seealso: `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10189 @*/
10190 PetscErrorCode MatFindZeroDiagonals(Mat mat,IS *is)
10191 {
10192   PetscFunctionBegin;
10193   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10194   PetscValidType(mat,1);
10195   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10196   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10197 
10198   if (!mat->ops->findzerodiagonals) {
10199     Vec                diag;
10200     const PetscScalar *a;
10201     PetscInt          *rows;
10202     PetscInt           rStart, rEnd, r, nrow = 0;
10203 
10204     PetscCall(MatCreateVecs(mat, &diag, NULL));
10205     PetscCall(MatGetDiagonal(mat, diag));
10206     PetscCall(MatGetOwnershipRange(mat, &rStart, &rEnd));
10207     PetscCall(VecGetArrayRead(diag, &a));
10208     for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) ++nrow;
10209     PetscCall(PetscMalloc1(nrow, &rows));
10210     nrow = 0;
10211     for (r = 0; r < rEnd-rStart; ++r) if (a[r] == 0.0) rows[nrow++] = r+rStart;
10212     PetscCall(VecRestoreArrayRead(diag, &a));
10213     PetscCall(VecDestroy(&diag));
10214     PetscCall(ISCreateGeneral(PetscObjectComm((PetscObject) mat), nrow, rows, PETSC_OWN_POINTER, is));
10215   } else {
10216     PetscCall((*mat->ops->findzerodiagonals)(mat, is));
10217   }
10218   PetscFunctionReturn(0);
10219 }
10220 
10221 /*@
10222    MatFindOffBlockDiagonalEntries - Finds all the rows of a matrix that have entries outside of the main diagonal block (defined by the matrix block size)
10223 
10224    Collective on Mat
10225 
10226    Input Parameter:
10227 .  mat - the matrix
10228 
10229    Output Parameter:
10230 .  is - contains the list of rows with off block diagonal entries
10231 
10232    Level: developer
10233 
10234 .seealso: `MatMultTranspose()`, `MatMultAdd()`, `MatMultTransposeAdd()`
10235 @*/
10236 PetscErrorCode MatFindOffBlockDiagonalEntries(Mat mat,IS *is)
10237 {
10238   PetscFunctionBegin;
10239   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10240   PetscValidType(mat,1);
10241   PetscCheck(mat->assembled,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
10242   PetscCheck(!mat->factortype,PetscObjectComm((PetscObject)mat),PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
10243 
10244   PetscCheck(mat->ops->findoffblockdiagonalentries,PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Matrix type %s does not have a find off block diagonal entries defined",((PetscObject)mat)->type_name);
10245   PetscCall((*mat->ops->findoffblockdiagonalentries)(mat,is));
10246   PetscFunctionReturn(0);
10247 }
10248 
/*@C
  MatInvertBlockDiagonal - Inverts the block diagonal entries.

  Collective on Mat

  Input Parameters:
. mat - the matrix

  Output Parameters:
. values - the block inverses in column major order (FORTRAN-like); owned by the matrix, do not free

   Note:
     The size of the blocks is determined by the block size of the matrix.

   Fortran Note:
     This routine is not available from Fortran.

  Level: advanced

.seealso: `MatInvertVariableBlockEnvelope()`, `MatInvertBlockDiagonalMat()`
@*/
PetscErrorCode MatInvertBlockDiagonal(Mat mat,const PetscScalar **values)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  /* no generic fallback exists; the matrix type must supply an implementation */
  PetscCheck(mat->ops->invertblockdiagonal,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for type %s",((PetscObject)mat)->type_name);
  PetscCall((*mat->ops->invertblockdiagonal)(mat,values));
  PetscFunctionReturn(0);
}
10280 
/*@C
  MatInvertVariableBlockDiagonal - Inverts the point block diagonal entries.

  Collective on Mat

  Input Parameters:
+ mat - the matrix
. nblocks - the number of blocks on the process, set with MatSetVariableBlockSizes()
- bsizes - the size of each block on the process, set with MatSetVariableBlockSizes()

  Output Parameters:
. values - the block inverses in column major order (FORTRAN-like); caller-provided storage

   Note:
   This routine is not available from Fortran.

  Level: advanced

.seealso: `MatInvertBlockDiagonal()`, `MatSetVariableBlockSizes()`, `MatInvertVariableBlockEnvelope()`
@*/
PetscErrorCode MatInvertVariableBlockDiagonal(Mat mat,PetscInt nblocks,const PetscInt *bsizes,PetscScalar *values)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
  PetscCheck(mat->assembled,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for unassembled matrix");
  PetscCheck(!mat->factortype,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Not for factored matrix");
  /* no generic fallback exists; the matrix type must supply an implementation */
  PetscCheck(mat->ops->invertvariableblockdiagonal,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for type %s",((PetscObject)mat)->type_name);
  PetscCall((*mat->ops->invertvariableblockdiagonal)(mat,nblocks,bsizes,values));
  PetscFunctionReturn(0);
}
10311 
10312 /*@
10313   MatInvertBlockDiagonalMat - set matrix C to be the inverted block diagonal of matrix A
10314 
10315   Collective on Mat
10316 
10317   Input Parameters:
10318 . A - the matrix
10319 
10320   Output Parameters:
10321 . C - matrix with inverted block diagonal of A.  This matrix should be created and may have its type set.
10322 
10323   Notes: the blocksize of the matrix is used to determine the blocks on the diagonal of C
10324 
10325   Level: advanced
10326 
10327 .seealso: `MatInvertBlockDiagonal()`
10328 @*/
10329 PetscErrorCode MatInvertBlockDiagonalMat(Mat A,Mat C)
10330 {
10331   const PetscScalar *vals;
10332   PetscInt          *dnnz;
10333   PetscInt           m,rstart,rend,bs,i,j;
10334 
10335   PetscFunctionBegin;
10336   PetscCall(MatInvertBlockDiagonal(A,&vals));
10337   PetscCall(MatGetBlockSize(A,&bs));
10338   PetscCall(MatGetLocalSize(A,&m,NULL));
10339   PetscCall(MatSetLayouts(C,A->rmap,A->cmap));
10340   PetscCall(PetscMalloc1(m/bs,&dnnz));
10341   for (j = 0; j < m/bs; j++) dnnz[j] = 1;
10342   PetscCall(MatXAIJSetPreallocation(C,bs,dnnz,NULL,NULL,NULL));
10343   PetscCall(PetscFree(dnnz));
10344   PetscCall(MatGetOwnershipRange(C,&rstart,&rend));
10345   PetscCall(MatSetOption(C,MAT_ROW_ORIENTED,PETSC_FALSE));
10346   for (i = rstart/bs; i < rend/bs; i++) {
10347     PetscCall(MatSetValuesBlocked(C,1,&i,1,&i,&vals[(i-rstart/bs)*bs*bs],INSERT_VALUES));
10348   }
10349   PetscCall(MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY));
10350   PetscCall(MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY));
10351   PetscCall(MatSetOption(C,MAT_ROW_ORIENTED,PETSC_TRUE));
10352   PetscFunctionReturn(0);
10353 }
10354 
/*@C
    MatTransposeColoringDestroy - Destroys a coloring context for matrix product C=A*B^T that was created
    via MatTransposeColoringCreate().

    Collective on MatTransposeColoring

    Input Parameter:
.   c - coloring context; set to NULL on final destroy

    Level: intermediate

.seealso: `MatTransposeColoringCreate()`
@*/
PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring *c)
{
  MatTransposeColoring matcolor=*c;

  PetscFunctionBegin;
  if (!matcolor) PetscFunctionReturn(0);
  /* other references remain: drop this one and leave the object alive */
  if (--((PetscObject)matcolor)->refct > 0) {matcolor = NULL; PetscFunctionReturn(0);}

  PetscCall(PetscFree3(matcolor->ncolumns,matcolor->nrows,matcolor->colorforrow));
  PetscCall(PetscFree(matcolor->rows));
  PetscCall(PetscFree(matcolor->den2sp));
  PetscCall(PetscFree(matcolor->colorforcol));
  PetscCall(PetscFree(matcolor->columns));
  /* NOTE(review): lstart appears to be allocated only when brows>0 -- guard kept to match; confirm against MatTransposeColoringCreate implementations */
  if (matcolor->brows>0) {
    PetscCall(PetscFree(matcolor->lstart));
  }
  /* destroys the header and zeroes the caller's pointer */
  PetscCall(PetscHeaderDestroy(c));
  PetscFunctionReturn(0);
}
10387 
/*@C
    MatTransColoringApplySpToDen - Given a symbolic matrix product C=A*B^T for which
    a MatTransposeColoring context has been created, computes a dense B^T by Apply
    MatTransposeColoring to sparse B.

    Collective on MatTransposeColoring

    Input Parameters:
+   coloring - coloring context created with MatTransposeColoringCreate()
.   B - sparse matrix B
-   Btdense - symbolic dense matrix B^T

    Output Parameter:
.   Btdense - dense matrix B^T

    Level: advanced

     Notes:
    These are used internally for some implementations of MatRARt()

.seealso: `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplyDenToSp()`

@*/
PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring coloring,Mat B,Mat Btdense)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(B,MAT_CLASSID,2);
  PetscValidHeaderSpecific(Btdense,MAT_CLASSID,3);
  PetscValidHeaderSpecific(coloring,MAT_TRANSPOSECOLORING_CLASSID,1);

  /* dispatch on the sparse matrix type; no generic fallback exists */
  PetscCheck(B->ops->transcoloringapplysptoden,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)B)->type_name);
  PetscCall((B->ops->transcoloringapplysptoden)(coloring,B,Btdense));
  PetscFunctionReturn(0);
}
10422 
/*@C
    MatTransColoringApplyDenToSp - Given a symbolic matrix product Csp=A*B^T for which
    a MatTransposeColoring context has been created and a dense matrix Cden=A*Btdense
    in which Btdens is obtained from MatTransColoringApplySpToDen(), recover sparse matrix
    Csp from Cden.

    Collective on MatTransposeColoring

    Input Parameters:
+   matcoloring - coloring context created with MatTransposeColoringCreate()
-   Cden - matrix product of a sparse matrix and a dense matrix Btdense

    Output Parameter:
.   Csp - sparse matrix

    Level: advanced

     Notes:
    These are used internally for some implementations of MatRARt()

.seealso: `MatTransposeColoringCreate()`, `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`

@*/
PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring matcoloring,Mat Cden,Mat Csp)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(matcoloring,MAT_TRANSPOSECOLORING_CLASSID,1);
  PetscValidHeaderSpecific(Cden,MAT_CLASSID,2);
  PetscValidHeaderSpecific(Csp,MAT_CLASSID,3);

  /* dispatch on the sparse result type; no generic fallback exists */
  PetscCheck(Csp->ops->transcoloringapplydentosp,PETSC_COMM_SELF,PETSC_ERR_SUP,"Not supported for this matrix type %s",((PetscObject)Csp)->type_name);
  PetscCall((Csp->ops->transcoloringapplydentosp)(matcoloring,Cden,Csp));
  /* the implementation sets values; finish assembly here so callers receive a usable matrix */
  PetscCall(MatAssemblyBegin(Csp,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(Csp,MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}
10459 
/*@C
   MatTransposeColoringCreate - Creates a matrix coloring context for matrix product C=A*B^T.

   Collective on Mat

   Input Parameters:
+  mat - the matrix product C
-  iscoloring - the coloring of the matrix; usually obtained with MatColoringCreate() or DMCreateColoring()

    Output Parameter:
.   color - the new coloring context

    Level: intermediate

.seealso: `MatTransposeColoringDestroy()`, `MatTransColoringApplySpToDen()`,
          `MatTransColoringApplyDenToSp()`
@*/
PetscErrorCode MatTransposeColoringCreate(Mat mat,ISColoring iscoloring,MatTransposeColoring *color)
{
  MatTransposeColoring c;
  MPI_Comm             comm;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(MAT_TransposeColoringCreate,mat,0,0,0));
  PetscCall(PetscObjectGetComm((PetscObject)mat,&comm));
  PetscCall(PetscHeaderCreate(c,MAT_TRANSPOSECOLORING_CLASSID,"MatTransposeColoring","Matrix product C=A*B^T via coloring","Mat",comm,MatTransposeColoringDestroy,NULL));

  /* carry the coloring type (global/ghosted) over to the new context */
  c->ctype = iscoloring->ctype;
  if (mat->ops->transposecoloringcreate) {
    PetscCall((*mat->ops->transposecoloringcreate)(mat,iscoloring,c));
  } else SETERRQ(PetscObjectComm((PetscObject)mat),PETSC_ERR_SUP,"Code not yet written for matrix type %s",((PetscObject)mat)->type_name);

  *color = c;
  PetscCall(PetscLogEventEnd(MAT_TransposeColoringCreate,mat,0,0,0));
  PetscFunctionReturn(0);
}
10496 
10497 /*@
10498       MatGetNonzeroState - Returns a 64 bit integer representing the current state of nonzeros in the matrix. If the
10499         matrix has had no new nonzero locations added to the matrix since the previous call then the value will be the
10500         same, otherwise it will be larger
10501 
10502      Not Collective
10503 
10504   Input Parameter:
10505 .    A  - the matrix
10506 
10507   Output Parameter:
10508 .    state - the current state
10509 
10510   Notes:
10511     You can only compare states from two different calls to the SAME matrix, you cannot compare calls between
10512          different matrices
10513 
10514   Level: intermediate
10515 
10516 @*/
10517 PetscErrorCode MatGetNonzeroState(Mat mat,PetscObjectState *state)
10518 {
10519   PetscFunctionBegin;
10520   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10521   *state = mat->nonzerostate;
10522   PetscFunctionReturn(0);
10523 }
10524 
10525 /*@
10526       MatCreateMPIMatConcatenateSeqMat - Creates a single large PETSc matrix by concatenating sequential
10527                  matrices from each processor
10528 
10529     Collective
10530 
10531    Input Parameters:
10532 +    comm - the communicators the parallel matrix will live on
10533 .    seqmat - the input sequential matrices
10534 .    n - number of local columns (or PETSC_DECIDE)
10535 -    reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10536 
10537    Output Parameter:
10538 .    mpimat - the parallel matrix generated
10539 
10540     Level: advanced
10541 
10542    Notes:
10543     The number of columns of the matrix in EACH processor MUST be the same.
10544 
10545 @*/
10546 PetscErrorCode MatCreateMPIMatConcatenateSeqMat(MPI_Comm comm,Mat seqmat,PetscInt n,MatReuse reuse,Mat *mpimat)
10547 {
10548   PetscFunctionBegin;
10549   PetscCheck(seqmat->ops->creatempimatconcatenateseqmat,PetscObjectComm((PetscObject)seqmat),PETSC_ERR_SUP,"Mat type %s",((PetscObject)seqmat)->type_name);
10550   PetscCheck(reuse != MAT_REUSE_MATRIX || seqmat != *mpimat,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"MAT_REUSE_MATRIX means reuse the matrix passed in as the final argument, not the original matrix");
10551 
10552   PetscCall(PetscLogEventBegin(MAT_Merge,seqmat,0,0,0));
10553   PetscCall((*seqmat->ops->creatempimatconcatenateseqmat)(comm,seqmat,n,reuse,mpimat));
10554   PetscCall(PetscLogEventEnd(MAT_Merge,seqmat,0,0,0));
10555   PetscFunctionReturn(0);
10556 }
10557 
10558 /*@
10559      MatSubdomainsCreateCoalesce - Creates index subdomains by coalescing adjacent
10560                  ranks' ownership ranges.
10561 
10562     Collective on A
10563 
10564    Input Parameters:
10565 +    A   - the matrix to create subdomains from
10566 -    N   - requested number of subdomains
10567 
10568    Output Parameters:
10569 +    n   - number of subdomains resulting on this rank
10570 -    iss - IS list with indices of subdomains on this rank
10571 
10572     Level: advanced
10573 
10574     Notes:
10575     number of subdomains must be smaller than the communicator size
10576 @*/
10577 PetscErrorCode MatSubdomainsCreateCoalesce(Mat A,PetscInt N,PetscInt *n,IS *iss[])
10578 {
10579   MPI_Comm        comm,subcomm;
10580   PetscMPIInt     size,rank,color;
10581   PetscInt        rstart,rend,k;
10582 
10583   PetscFunctionBegin;
10584   PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
10585   PetscCallMPI(MPI_Comm_size(comm,&size));
10586   PetscCallMPI(MPI_Comm_rank(comm,&rank));
10587   PetscCheck(N >= 1 && N < (PetscInt)size,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of subdomains must be > 0 and < %d, got N = %" PetscInt_FMT,size,N);
10588   *n = 1;
10589   k = ((PetscInt)size)/N + ((PetscInt)size%N>0); /* There are up to k ranks to a color */
10590   color = rank/k;
10591   PetscCallMPI(MPI_Comm_split(comm,color,rank,&subcomm));
10592   PetscCall(PetscMalloc1(1,iss));
10593   PetscCall(MatGetOwnershipRange(A,&rstart,&rend));
10594   PetscCall(ISCreateStride(subcomm,rend-rstart,rstart,1,iss[0]));
10595   PetscCallMPI(MPI_Comm_free(&subcomm));
10596   PetscFunctionReturn(0);
10597 }
10598 
10599 /*@
10600    MatGalerkin - Constructs the coarse grid problem via Galerkin projection.
10601 
10602    If the interpolation and restriction operators are the same, uses MatPtAP.
10603    If they are not the same, use MatMatMatMult.
10604 
10605    Once the coarse grid problem is constructed, correct for interpolation operators
10606    that are not of full rank, which can legitimately happen in the case of non-nested
10607    geometric multigrid.
10608 
10609    Input Parameters:
10610 +  restrct - restriction operator
10611 .  dA - fine grid matrix
10612 .  interpolate - interpolation operator
10613 .  reuse - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
10614 -  fill - expected fill, use PETSC_DEFAULT if you do not have a good estimate
10615 
10616    Output Parameters:
10617 .  A - the Galerkin coarse matrix
10618 
10619    Options Database Key:
10620 .  -pc_mg_galerkin <both,pmat,mat,none> - for what matrices the Galerkin process should be used
10621 
10622    Level: developer
10623 
10624 .seealso: `MatPtAP()`, `MatMatMatMult()`
10625 @*/
10626 PetscErrorCode  MatGalerkin(Mat restrct, Mat dA, Mat interpolate, MatReuse reuse, PetscReal fill, Mat *A)
10627 {
10628   IS             zerorows;
10629   Vec            diag;
10630 
10631   PetscFunctionBegin;
10632   PetscCheck(reuse != MAT_INPLACE_MATRIX,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"Inplace product not supported");
10633   /* Construct the coarse grid matrix */
10634   if (interpolate == restrct) {
10635     PetscCall(MatPtAP(dA,interpolate,reuse,fill,A));
10636   } else {
10637     PetscCall(MatMatMatMult(restrct,dA,interpolate,reuse,fill,A));
10638   }
10639 
10640   /* If the interpolation matrix is not of full rank, A will have zero rows.
10641      This can legitimately happen in the case of non-nested geometric multigrid.
10642      In that event, we set the rows of the matrix to the rows of the identity,
10643      ignoring the equations (as the RHS will also be zero). */
10644 
10645   PetscCall(MatFindZeroRows(*A, &zerorows));
10646 
10647   if (zerorows != NULL) { /* if there are any zero rows */
10648     PetscCall(MatCreateVecs(*A, &diag, NULL));
10649     PetscCall(MatGetDiagonal(*A, diag));
10650     PetscCall(VecISSet(diag, zerorows, 1.0));
10651     PetscCall(MatDiagonalSet(*A, diag, INSERT_VALUES));
10652     PetscCall(VecDestroy(&diag));
10653     PetscCall(ISDestroy(&zerorows));
10654   }
10655   PetscFunctionReturn(0);
10656 }
10657 
10658 /*@C
10659     MatSetOperation - Allows user to set a matrix operation for any matrix type
10660 
10661    Logically Collective on Mat
10662 
10663     Input Parameters:
10664 +   mat - the matrix
10665 .   op - the name of the operation
10666 -   f - the function that provides the operation
10667 
10668    Level: developer
10669 
10670     Usage:
10671 $      extern PetscErrorCode usermult(Mat,Vec,Vec);
10672 $      PetscCall(MatCreateXXX(comm,...&A);
10673 $      PetscCall(MatSetOperation(A,MATOP_MULT,(void(*)(void))usermult);
10674 
10675     Notes:
10676     See the file include/petscmat.h for a complete list of matrix
10677     operations, which all have the form MATOP_<OPERATION>, where
10678     <OPERATION> is the name (in all capital letters) of the
10679     user interface routine (e.g., MatMult() -> MATOP_MULT).
10680 
10681     All user-provided functions (except for MATOP_DESTROY) should have the same calling
10682     sequence as the usual matrix interface routines, since they
10683     are intended to be accessed via the usual matrix interface
10684     routines, e.g.,
10685 $       MatMult(Mat,Vec,Vec) -> usermult(Mat,Vec,Vec)
10686 
10687     In particular each function MUST return an error code of 0 on success and
10688     nonzero on failure.
10689 
10690     This routine is distinct from MatShellSetOperation() in that it can be called on any matrix type.
10691 
10692 .seealso: `MatGetOperation()`, `MatCreateShell()`, `MatShellSetContext()`, `MatShellSetOperation()`
10693 @*/
10694 PetscErrorCode MatSetOperation(Mat mat,MatOperation op,void (*f)(void))
10695 {
10696   PetscFunctionBegin;
10697   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10698   if (op == MATOP_VIEW && !mat->ops->viewnative && f != (void (*)(void))(mat->ops->view)) {
10699     mat->ops->viewnative = mat->ops->view;
10700   }
10701   (((void(**)(void))mat->ops)[op]) = f;
10702   PetscFunctionReturn(0);
10703 }
10704 
10705 /*@C
10706     MatGetOperation - Gets a matrix operation for any matrix type.
10707 
10708     Not Collective
10709 
10710     Input Parameters:
10711 +   mat - the matrix
10712 -   op - the name of the operation
10713 
10714     Output Parameter:
10715 .   f - the function that provides the operation
10716 
10717     Level: developer
10718 
10719     Usage:
10720 $      PetscErrorCode (*usermult)(Mat,Vec,Vec);
10721 $      MatGetOperation(A,MATOP_MULT,(void(**)(void))&usermult);
10722 
10723     Notes:
10724     See the file include/petscmat.h for a complete list of matrix
10725     operations, which all have the form MATOP_<OPERATION>, where
10726     <OPERATION> is the name (in all capital letters) of the
10727     user interface routine (e.g., MatMult() -> MATOP_MULT).
10728 
10729     This routine is distinct from MatShellGetOperation() in that it can be called on any matrix type.
10730 
10731 .seealso: `MatSetOperation()`, `MatCreateShell()`, `MatShellGetContext()`, `MatShellGetOperation()`
10732 @*/
10733 PetscErrorCode MatGetOperation(Mat mat,MatOperation op,void(**f)(void))
10734 {
10735   PetscFunctionBegin;
10736   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10737   *f = (((void (**)(void))mat->ops)[op]);
10738   PetscFunctionReturn(0);
10739 }
10740 
10741 /*@
10742     MatHasOperation - Determines whether the given matrix supports the particular
10743     operation.
10744 
10745    Not Collective
10746 
10747    Input Parameters:
10748 +  mat - the matrix
10749 -  op - the operation, for example, MATOP_GET_DIAGONAL
10750 
10751    Output Parameter:
10752 .  has - either PETSC_TRUE or PETSC_FALSE
10753 
10754    Level: advanced
10755 
10756    Notes:
10757    See the file include/petscmat.h for a complete list of matrix
10758    operations, which all have the form MATOP_<OPERATION>, where
10759    <OPERATION> is the name (in all capital letters) of the
10760    user-level routine.  E.g., MatNorm() -> MATOP_NORM.
10761 
10762 .seealso: `MatCreateShell()`
10763 @*/
10764 PetscErrorCode MatHasOperation(Mat mat,MatOperation op,PetscBool *has)
10765 {
10766   PetscFunctionBegin;
10767   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10768   PetscValidBoolPointer(has,3);
10769   if (mat->ops->hasoperation) {
10770     PetscCall((*mat->ops->hasoperation)(mat,op,has));
10771   } else {
10772     if (((void**)mat->ops)[op]) *has = PETSC_TRUE;
10773     else {
10774       *has = PETSC_FALSE;
10775       if (op == MATOP_CREATE_SUBMATRIX) {
10776         PetscMPIInt size;
10777 
10778         PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)mat),&size));
10779         if (size == 1) {
10780           PetscCall(MatHasOperation(mat,MATOP_CREATE_SUBMATRICES,has));
10781         }
10782       }
10783     }
10784   }
10785   PetscFunctionReturn(0);
10786 }
10787 
10788 /*@
10789     MatHasCongruentLayouts - Determines whether the rows and columns layouts
10790     of the matrix are congruent
10791 
10792    Collective on mat
10793 
10794    Input Parameters:
10795 .  mat - the matrix
10796 
10797    Output Parameter:
10798 .  cong - either PETSC_TRUE or PETSC_FALSE
10799 
10800    Level: beginner
10801 
10802    Notes:
10803 
10804 .seealso: `MatCreate()`, `MatSetSizes()`
10805 @*/
10806 PetscErrorCode MatHasCongruentLayouts(Mat mat,PetscBool *cong)
10807 {
10808   PetscFunctionBegin;
10809   PetscValidHeaderSpecific(mat,MAT_CLASSID,1);
10810   PetscValidType(mat,1);
10811   PetscValidBoolPointer(cong,2);
10812   if (!mat->rmap || !mat->cmap) {
10813     *cong = mat->rmap == mat->cmap ? PETSC_TRUE : PETSC_FALSE;
10814     PetscFunctionReturn(0);
10815   }
10816   if (mat->congruentlayouts == PETSC_DECIDE) { /* first time we compare rows and cols layouts */
10817     PetscCall(PetscLayoutSetUp(mat->rmap));
10818     PetscCall(PetscLayoutSetUp(mat->cmap));
10819     PetscCall(PetscLayoutCompare(mat->rmap,mat->cmap,cong));
10820     if (*cong) mat->congruentlayouts = 1;
10821     else       mat->congruentlayouts = 0;
10822   } else *cong = mat->congruentlayouts ? PETSC_TRUE : PETSC_FALSE;
10823   PetscFunctionReturn(0);
10824 }
10825 
10826 PetscErrorCode MatSetInf(Mat A)
10827 {
10828   PetscFunctionBegin;
10829   PetscCheck(A->ops->setinf,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"No support for this operation for this matrix type");
10830   PetscCall((*A->ops->setinf)(A));
10831   PetscFunctionReturn(0);
10832 }
10833