xref: /petsc/include/petscmat.h (revision 82fcb398e7dfd8593b8ac6495319e3e356da1056)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 
61 #define MATSEQBSTRM        "seqbstrm"
62 #define MATMPIBSTRM        "mpibstrm"
63 #define MATBSTRM           "bstrm"
64 #define MATSEQSBSTRM       "seqsbstrm"
65 #define MATMPISBSTRM       "mpisbstrm"
66 #define MATSBSTRM          "sbstrm"
67 
68 #define MATDAAD            "daad"
69 #define MATMFFD            "mffd"
70 #define MATNORMAL          "normal"
71 #define MATLRC             "lrc"
72 #define MATSCATTER         "scatter"
73 #define MATBLOCKMAT        "blockmat"
74 #define MATCOMPOSITE       "composite"
75 #define MATFFT             "fft"
76 #define MATFFTW            "fftw"
77 #define MATSEQCUFFT        "seqcufft"
78 #define MATTRANSPOSEMAT    "transpose"
79 #define MATSCHURCOMPLEMENT "schurcomplement"
80 #define MATPYTHON          "python"
81 #define MATHYPRESTRUCT     "hyprestruct"
82 #define MATHYPRESSTRUCT    "hypresstruct"
83 #define MATSUBMATRIX       "submatrix"
84 #define MATLOCALREF        "localref"
85 #define MATNEST            "nest"
86 
87 /*E
88     MatSolverPackage - String with the name of a PETSc matrix solver type.
89 
90     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
91        SuperLU or SuperLU_Dist etc.
92 
93 
94    Level: beginner
95 
96 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
97 E*/
98 #define MatSolverPackage char*
99 #define MATSOLVERSPOOLES      "spooles"
100 #define MATSOLVERSUPERLU      "superlu"
101 #define MATSOLVERSUPERLU_DIST "superlu_dist"
102 #define MATSOLVERUMFPACK      "umfpack"
103 #define MATSOLVERCHOLMOD      "cholmod"
104 #define MATSOLVERESSL         "essl"
105 #define MATSOLVERLUSOL        "lusol"
106 #define MATSOLVERMUMPS        "mumps"
107 #define MATSOLVERPASTIX       "pastix"
108 #define MATSOLVERDSCPACK      "dscpack"
109 #define MATSOLVERMATLAB       "matlab"
110 #define MATSOLVERPETSC        "petsc"
111 #define MATSOLVERPLAPACK      "plapack"
112 #define MATSOLVERBAS          "bas"
113 
114 #define MATSOLVERBSTRM        "bstrm"
115 #define MATSOLVERSBSTRM       "sbstrm"
116 
117 /*E
118     MatFactorType - indicates what type of factorization is requested
119 
120     Level: beginner
121 
122    Any additions/changes here MUST also be made in include/finclude/petscmat.h
123 
124 .seealso: MatSolverPackage, MatGetFactor()
125 E*/
126 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; /* which factorization a factored Mat holds; MAT_FACTOR_NONE (=0) marks an unfactored matrix */
127 extern const char *const MatFactorTypes[];
128 
129 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
130 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
131 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
132 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
133 
134 /* Logging support */
135 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
136 extern PetscClassId  MAT_CLASSID;
137 extern PetscClassId  MAT_FDCOLORING_CLASSID;
138 extern PetscClassId  MAT_PARTITIONING_CLASSID;
139 extern PetscClassId  MAT_NULLSPACE_CLASSID;
140 extern PetscClassId  MATMFFD_CLASSID;
141 
142 /*E
143     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
144      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate
145      that the input matrix is to be replaced with the converted matrix.
146 
147     Level: beginner
148 
149    Any additions/changes here MUST also be made in include/finclude/petscmat.h
150 
151 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
152 E*/
153 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; /* create a new output matrix, reuse the one passed in, or do not produce one at all */
154 
155 /*E
156     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
157      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
158 
159     Level: beginner
160 
161 .seealso: MatGetSeqNonzerostructure()
162 E*/
163 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; /* whether extracted submatrices carry numerical values or only the nonzero structure */
164 
165 extern PetscErrorCode  MatInitializePackage(const char[]);
166 
167 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
168 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
169 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
170 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
171 extern PetscErrorCode  MatSetType(Mat,const MatType);
172 extern PetscErrorCode  MatSetFromOptions(Mat);
173 extern PetscErrorCode  MatSetUpPreallocation(Mat);
174 extern PetscErrorCode  MatRegisterAll(const char[]);
175 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
176 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
177 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
179 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
180 
181 /*MC
182    MatRegisterDynamic - Adds a new matrix type
183 
184    Synopsis:
185    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
186 
187    Not Collective
188 
189    Input Parameters:
190 +  name - name of a new user-defined matrix type
191 .  path - path (either absolute or relative) to the library containing this solver
192 .  name_create - name of routine to create method context
193 -  routine_create - routine to create method context
194 
195    Notes:
196    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
197 
198    If dynamic libraries are used, then the fourth input argument (routine_create)
199    is ignored.
200 
201    Sample usage:
202 .vb
203    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
204                "MyMatCreate",MyMatCreate);
205 .ve
206 
207    Then, your solver can be chosen with the procedural interface via
208 $     MatSetType(Mat,"my_mat")
209    or at runtime via the option
210 $     -mat_type my_mat
211 
212    Level: advanced
213 
214    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
215          If your function is not being put into a shared library then use MatRegister() instead
216 
217 .keywords: Mat, register
218 
219 .seealso: MatRegisterAll(), MatRegisterDestroy()
220 
221 M*/
222 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation-function pointer (argument d) is ignored (passed as 0):
   the routine is looked up by name (c) in the library at path (b) when first needed. */
223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
224 #else
/* Static build: register the function pointer directly. */
225 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
226 #endif
227 
228 extern PetscBool  MatRegisterAllCalled;
229 extern PetscFList MatList;
230 extern PetscFList MatColoringList;
231 extern PetscFList MatPartitioningList;
232 
233 /*E
234     MatStructure - Indicates if the matrix has the same nonzero structure
235 
236     Level: beginner
237 
238    Any additions/changes here MUST also be made in include/finclude/petscmat.h
239 
240 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
241 E*/
242 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure; /* relationship between two matrices' nonzero patterns; passed to e.g. MatCopy(), KSPSetOperators() */
243 
244 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
246 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
253 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
254 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
258 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
261 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
265 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
267 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
268 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
269 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
270 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
271 
272 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
279 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
280 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
284 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
287 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
291 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
293 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
294 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
295 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
296 
297 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
298 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
302 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
305 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
306 
307 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
311 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
314 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
318 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
320 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
321 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
322 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
323 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
324 
325 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
326 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
327 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
328 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
329 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
330 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
331 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
332 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
333 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
334 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
335 
336 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
339 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
340 
341 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
342 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
343 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
344 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
345 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
346 extern PetscErrorCode  MatCompositeMerge(Mat);
347 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/* How a composite matrix (MatCreateComposite()) combines its member matrices when applied:
   presumably as a sum (additive) or as a product applied in sequence (multiplicative) --
   set via MatCompositeSetType() below. */
348 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
349 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
350 
351 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
352 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
353 
354 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
355 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
356 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
357 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
358 
359 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
360 
361 extern PetscErrorCode  MatSetUp(Mat);
362 extern PetscErrorCode  MatDestroy(Mat*);
363 
364 extern PetscErrorCode  MatConjugate(Mat);
365 extern PetscErrorCode  MatRealPart(Mat);
366 extern PetscErrorCode  MatImaginaryPart(Mat);
367 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
368 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
369 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
370 
371 /* ------------------------------------------------------------*/
372 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
374 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
375 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
376 
377 /*S
378      MatStencil - Data structure (C struct) for storing information about a single row or
379         column of a matrix as index on an associated grid.
380 
381    Level: beginner
382 
383   Concepts: matrix; linear operator
384 
385 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
386 S*/
387 typedef struct {
388   PetscInt k,j,i,c; /* grid indices of the row/column (k,j,i order) plus c, presumably the component/degree-of-freedom index -- see MatSetValuesStencil() */
389 } MatStencil;
390 
391 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
392 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
393 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
394 
395 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
396 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
397 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
398 
399 /*E
400     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
401      to continue to add values to it
402 
403     Level: beginner
404 
405 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
406 E*/
407 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* values are explicit and deliberately inverted: MAT_FINAL_ASSEMBLY is 0, MAT_FLUSH_ASSEMBLY is 1 */
408 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
409 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
410 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
411 
412 
413 
414 /*E
415     MatOption - Options that may be set for a matrix and its behavior or storage
416 
417     Level: beginner
418 
419    Any additions/changes here MUST also be made in include/finclude/petscmat.h
420 
421 .seealso: MatSetOption()
422 E*/
423 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS, /* each option is toggled with MatSetOption(mat,option,PetscBool) */
424               MAT_SYMMETRIC,
425               MAT_STRUCTURALLY_SYMMETRIC,
426               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
427               MAT_NEW_NONZERO_LOCATION_ERR,
428               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
429               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
430               MAT_USE_INODES,
431               MAT_HERMITIAN,
432               MAT_SYMMETRY_ETERNAL,
433               MAT_CHECK_COMPRESSED_ROW,
434               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
435               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
436               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
437               NUM_MAT_OPTIONS} MatOption; /* NUM_MAT_OPTIONS is a sentinel count, not a settable option */
438 extern const char *MatOptions[];
439 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
440 extern PetscErrorCode  MatGetType(Mat,const MatType*);
441 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
442 
443 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
444 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
445 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
446 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
447 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
448 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
449 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
450 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
451 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
452 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
453 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
454 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
455 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
456 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
457 
458 
459 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
460 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
461 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
462 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
463 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
464 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
465 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
466 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
467 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
468 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
469 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
470 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
471 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
472 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
473 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
474 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
475 
476 /*E
477     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
478   its numerical values copied over or just its nonzero structure.
479 
480     Level: beginner
481 
482    Any additions/changes here MUST also be made in include/finclude/petscmat.h
483 
484 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
485 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
486 $                               have several matrices with the same nonzero pattern.
487 
488 .seealso: MatDuplicate()
489 E*/
490 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; /* per the note in the doc block: MAT_SHARE_NONZERO_PATTERN also implies values are not copied */
491 
492 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
493 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
494 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
495 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
496 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
497 
498 
499 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
500 extern PetscErrorCode  MatView(Mat,PetscViewer);
501 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
502 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
503 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
504 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
505 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
506 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
507 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
508 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
509 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
510 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
511 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
512 
513 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
514 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
515 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
516 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
517 
518 /*S
519      MatInfo - Context of matrix information, used with MatGetInfo()
520 
521    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
522 
523    Level: intermediate
524 
525   Concepts: matrix^nonzero information
526 
527 .seealso:  MatGetInfo(), MatInfoType
528 S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, and allocated but not needed */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: estimate supplied by user vs actually required */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
538 
539 /*E
540     MatInfoType - Indicates if you want information about the local part of the matrix,
541      the entire parallel matrix or the maximum over all the local parts.
542 
543     Level: beginner
544 
545    Any additions/changes here MUST also be made in include/finclude/petscmat.h
546 
547 .seealso: MatGetInfo(), MatInfo
548 E*/
549 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
550 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
551 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
552 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
553 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
554 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
555 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
556 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
557 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
558 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
559 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
560 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
561 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
562 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
563 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
564 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
565 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
566 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
567 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
568 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
569 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
570 
571 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
572 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
573 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
574 extern PetscErrorCode  MatZeroEntries(Mat);
575 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
576 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
577 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
578 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
579 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
580 
581 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
582 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
583 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
584 
585 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
586 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
587 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
588 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
589 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
590 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
591 
592 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
593 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
594 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
595 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
596 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
597 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
598 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
599 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
600 
601 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
602 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
603 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
604 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
605 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
606 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
607 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
608 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
609 #if defined (PETSC_USE_CTABLE)
610 #include "petscctable.h"
611 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
612 #else
613 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
614 #endif
615 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
616 
617 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
618 
619 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
620 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
621 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
622 
623 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
624 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
625 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
626 
627 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
628 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
629 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
630 
631 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
632 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
633 
634 extern PetscErrorCode  MatScale(Mat,PetscScalar);
635 extern PetscErrorCode  MatShift(Mat,PetscScalar);
636 
637 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
638 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
639 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
640 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
641 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
642 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
643 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
644 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
645 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
646 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
647 
648 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
649 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
650 
651 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
652 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
653 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
654 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
655 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
656 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
657 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
658 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
659 
660 /*MC
661    MatSetValue - Set a single entry into a matrix.
662 
663    Not collective
664 
665    Input Parameters:
666 +  m - the matrix
667 .  row - the row location of the entry
668 .  col - the column location of the entry
669 .  value - the value to insert
670 -  mode - either INSERT_VALUES or ADD_VALUES
671 
672    Notes:
673    For efficiency one should use MatSetValues() and set several or many
674    values simultaneously if possible.
675 
676    Level: beginner
677 
678 .seealso: MatSetValues(), MatSetValueLocal()
679 M*/
680 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
681 
682 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
683 
684 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
685 
686 extern PetscErrorCode MatSeqAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
687 extern PetscErrorCode MatMPIAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
688 
689 /*MC
690    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
691        row in a matrix providing the data that one can use to correctly preallocate the matrix.
692 
693    Synopsis:
694    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
695 
696    Collective on MPI_Comm
697 
698    Input Parameters:
699 +  comm - the communicator that will share the eventually allocated matrix
700 .  nrows - the number of LOCAL rows in the matrix
701 -  ncols - the number of LOCAL columns in the matrix
702 
   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
712 
713    Do not malloc or free dnz and onz, that is handled internally by these routines
714 
715    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
716 
717    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
718 
719   Concepts: preallocation^Matrix
720 
721 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
722           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
723 M*/
/* Opens a scope (closed by MatPreallocateFinalize()) that allocates and zeros the
   dnz/onz count arrays and computes this process's ownership ranges via MPI_Scan:
   __rstart = first locally owned row, [__start,__end) = locally owned column range.
   These hidden variables are used by the MatPreallocateSet*() macros. */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
732 
733 /*MC
734    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
735        row in a matrix providing the data that one can use to correctly preallocate the matrix.
736 
737    Synopsis:
738    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
739 
740    Collective on MPI_Comm
741 
742    Input Parameters:
743 +  comm - the communicator that will share the eventually allocated matrix
744 .  nrows - the number of LOCAL rows in the matrix
745 -  ncols - the number of LOCAL columns in the matrix
746 
   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
756 
757    Do not malloc or free dnz and onz, that is handled internally by these routines
758 
759    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
760 
761   Concepts: preallocation^Matrix
762 
763 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
764           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
765 M*/
/* Symmetric (SBAIJ) variant of MatPreallocateInitialize(): opens a scope (closed by
   MatPreallocateFinalize()), allocates and zeros dnz/onz, and computes __rstart
   (first locally owned row) and __end (one past the last locally owned column).
   No __start is declared since MatPreallocateSymmetricSet() only tests against __end. */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
774 
775 /*MC
776    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
777        inserted using a local number of the rows and columns
778 
   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
799 
800    Do not malloc or free dnz and onz, that is handled internally by these routines
801 
802   Concepts: preallocation^Matrix
803 
804 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
805           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
806 M*/
/* Translates the local row indices in rows[] and column indices in cols[] to the
   global numbering IN PLACE (the caller's arrays are overwritten), then counts each
   row's nonzeros via MatPreallocateSet(). Relies on _4_ierr and the range variables
   declared by MatPreallocateInitialize(). */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
816 
817 /*MC
818    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
819        inserted using a local number of the rows and columns
820 
   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows,PetscInt *rows,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
840 
841    Do not malloc or free dnz and onz that is handled internally by these routines
842 
843   Concepts: preallocation^Matrix
844 
845 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
846           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
847 M*/
/* Symmetric variant of MatPreallocateSetLocal(): maps rows[] and cols[] to global
   numbering IN PLACE using a single mapping for both, then counts via
   MatPreallocateSymmetricSet(). Relies on _4_ierr and the range variables declared
   by MatPreallocateSymmetricInitialize(). */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
857 
858 /*MC
859    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
860        inserted using a local number of the rows and columns
861 
   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns indicated
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
881 
882    Do not malloc or free dnz and onz that is handled internally by these routines
883 
884    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
885 
886   Concepts: preallocation^Matrix
887 
888 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
889           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
890 M*/
/* Counts one row's nonzeros into the dnz (diagonal-block) / onz (off-diagonal-block)
   arrays, using the __rstart/__start/__end/__nrows variables declared by
   MatPreallocateInitialize(). All macro arguments are parenthesized so callers may
   pass expressions safely. */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if ((row) < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",(row),__rstart);\
  if ((row) >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",(row),__rstart+__nrows-1);\
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else (dnz)[(row) - __rstart]++;\
  }\
}
900 
901 /*MC
902    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
903        inserted using a local number of the rows and columns
904 
   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
923 
924    Do not malloc or free dnz and onz that is handled internally by these routines
925 
926    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
927 
928   Concepts: preallocation^Matrix
929 
930 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
931           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
932 M*/
/* Symmetric variant of MatPreallocateSet(): only columns at or above the row (the
   upper triangle) are counted, using __rstart/__end from
   MatPreallocateSymmetricInitialize(). Arguments are parenthesized for macro
   hygiene, consistent with MatPreallocateSet()'s (cols)[__i]. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else if ((cols)[__i] >= (row)) (dnz)[(row) - __rstart]++;\
  }\
}
940 
941 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
962 
963    Do not malloc or free dnz and onz that is handled internally by these routines
964 
965    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
966 
967   Concepts: preallocation^Matrix
968 
969 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
970           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
971 M*/
/* If A is non-NULL, inserts the locations directly into A (with no values); otherwise
   records them in the dnz/onz preallocation counts via MatPreallocateSet().
   NOTE(review): unlike the other MatPreallocate macros this uses the caller's ierr,
   not the _4_ierr declared by MatPreallocateInitialize(). */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
973 
974 
975 /*MC
976    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
977        row in a matrix providing the data that one can use to correctly preallocate the matrix.
978 
979    Synopsis:
980    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
981 
982    Collective on MPI_Comm
983 
   Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
993 
994    Do not malloc or free dnz and onz that is handled internally by these routines
995 
996    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
997 
998   Concepts: preallocation^Matrix
999 
1000 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1001           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1002 M*/
/* Frees the dnz/onz arrays and closes (with the trailing brace) the scope opened by
   MatPreallocateInitialize() or MatPreallocateSymmetricInitialize(). */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1004 
1005 
1006 
1007 /* Routines unique to particular data structures */
1008 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1009 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1010 
1011 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1012 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1013 
1014 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1015 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1016 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1017 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1018 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1019 
1020 #define MAT_SKIP_ALLOCATION -4
1021 
1022 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1023 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1024 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1025 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1026 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1027 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1028 
1029 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1030 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1031 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1032 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1033 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1034 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1035 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1036 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1037 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1038 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1039 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1040 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1041 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1042 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1043 
1044 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1045 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1046 
1047 extern PetscErrorCode  MatStoreValues(Mat);
1048 extern PetscErrorCode  MatRetrieveValues(Mat);
1049 
1050 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1051 
1052 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1053 /*
1054   These routines are not usually accessed directly, rather solving is
1055   done through the KSP and PC interfaces.
1056 */
1057 
1058 /*E
1059     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1060        with an optional dynamic library name, for example
1061        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1062 
1063    Level: beginner
1064 
1065    Cannot use const because the PC objects manipulate the string
1066 
1067 .seealso: MatGetOrdering()
1068 E*/
1069 #define MatOrderingType char*
1070 #define MATORDERINGNATURAL     "natural"
1071 #define MATORDERINGND          "nd"
1072 #define MATORDERING1WD         "1wd"
1073 #define MATORDERINGRCM         "rcm"
1074 #define MATORDERINGQMD         "qmd"
1075 #define MATORDERINGROWLENGTH   "rowlength"
1076 #define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
1077 #define MATORDERINGDSC_MMD     "dsc_mmd"
1078 #define MATORDERINGDSC_MDF     "dsc_mdf"
1079 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1080 
1081 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1082 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1083 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1084 
1085 /*MC
1086    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1087 
1088    Synopsis:
1089    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1090 
1091    Not Collective
1092 
1093    Input Parameters:
1094 +  sname - name of ordering (for example MATORDERINGND)
1095 .  path - location of library where creation routine is
1096 .  name - name of function that creates the ordering type,a string
1097 -  function - function pointer that creates the ordering
1098 
1099    Level: developer
1100 
1101    If dynamic libraries are used, then the fourth input argument (function)
1102    is ignored.
1103 
1104    Sample usage:
1105 .vb
1106    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1107                "MyOrder",MyOrder);
1108 .ve
1109 
   Then, your ordering can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1116 
1117 .keywords: matrix, ordering, register
1118 
1119 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1120 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* with dynamic libraries the creation routine is looked up at runtime by name (arguments b,c),
   so the compile-time function pointer d is dropped (see the manual page above) */
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1126 
1127 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1128 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1129 extern PetscBool  MatOrderingRegisterAllCalled;
1130 extern PetscFList MatOrderingList;
1131 
1132 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1133 
/*E
    MatFactorShiftType - Type of numeric shift applied during factorization to avoid zero pivots.

   Level: beginner

E*/
/* MAT_SHIFT_NONE              - no shift is applied
   MAT_SHIFT_NONZERO           - presumably shifts so pivots stay nonzero -- confirm against factorization routines
   MAT_SHIFT_POSITIVE_DEFINITE - presumably shifts to keep the factor positive definite -- confirm
   MAT_SHIFT_INBLOCKS          - presumably shifts within diagonal blocks -- confirm
   Selected via MatFactorInfo.shifttype; magnitude via MatFactorInfo.shiftamount. */
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1141 extern const char *MatFactorShiftTypes[];
1142 
1143 /*S
1144    MatFactorInfo - Data passed into the matrix factorization routines
1145 
1146    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1147 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1148 
1149    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1150 
1151       You can use MatFactorInfoInitialize() to set default values.
1152 
1153    Level: developer
1154 
1155 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1156           MatFactorInfoInitialize()
1157 
1158 S*/
/* All fields are PetscReal so a Fortran double precision array of MAT_FACTORINFO_SIZE maps onto it (see manual page above) */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero to enable drop-tolerance (dt) based factorization -- NOTE(review): confirm against the ILUDT implementation */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value) */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1173 
1174 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1175 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1176 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1177 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1178 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1179 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1180 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1181 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1182 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1183 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1184 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1185 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1186 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1187 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1188 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1189 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1190 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1191 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1192 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1193 
1194 extern PetscErrorCode  MatSetUnfactored(Mat);
1195 
1196 /*E
1197     MatSORType - What type of (S)SOR to perform
1198 
1199     Level: beginner
1200 
1201    May be bitwise ORd together
1202 
1203    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1204 
1205    MatSORType may be bitwise ORd together, so do not change the numbers
1206 
1207 .seealso: MatSOR()
1208 E*/
/* Bit flags -- values may be ORed together (see manual page above); do not renumber */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,      /* SYMMETRIC == FORWARD | BACKWARD */
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,              /* LOCAL_SYMMETRIC == LOCAL_FORWARD | LOCAL_BACKWARD */
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1213 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1214 
1215 /*
1216     These routines are for efficiently computing Jacobians via finite differences.
1217 */
1218 
1219 /*E
1220     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1221        with an optional dynamic library name, for example
1222        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1223 
1224    Level: beginner
1225 
1226 .seealso: MatGetColoring()
1227 E*/
1228 #define MatColoringType char*
1229 #define MATCOLORINGNATURAL "natural"
1230 #define MATCOLORINGSL      "sl"
1231 #define MATCOLORINGLF      "lf"
1232 #define MATCOLORINGID      "id"
1233 
1234 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1235 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1236 
1237 /*MC
1238    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1239                                matrix package.
1240 
1241    Synopsis:
1242    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1243 
1244    Not Collective
1245 
1246    Input Parameters:
1247 +  sname - name of Coloring (for example MATCOLORINGSL)
1248 .  path - location of library where creation routine is
1249 .  name - name of function that creates the Coloring type, a string
1250 -  function - function pointer that creates the coloring
1251 
1252    Level: developer
1253 
1254    If dynamic libraries are used, then the fourth input argument (function)
1255    is ignored.
1256 
1257    Sample usage:
1258 .vb
1259    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1260                "MyColor",MyColor);
1261 .ve
1262 
   Then, your coloring routine can be chosen with the procedural interface via
1264 $     MatColoringSetType(part,"my_color")
1265    or at runtime via the option
1266 $     -mat_coloring_type my_color
1267 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1269 
1270 .keywords: matrix, Coloring, register
1271 
1272 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1273 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* with dynamic libraries the creation routine is looked up at runtime by name (arguments b,c),
   so the compile-time function pointer d is dropped (see the manual page above) */
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1279 
1280 extern PetscBool  MatColoringRegisterAllCalled;
1281 
1282 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1283 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1284 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1285 
1286 /*S
1287      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1288         and coloring
1289 
1290    Level: beginner
1291 
1292   Concepts: coloring, sparse Jacobian, finite differences
1293 
1294 .seealso:  MatFDColoringCreate()
1295 S*/
1296 typedef struct _p_MatFDColoring* MatFDColoring;
1297 
1298 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1299 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1300 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1301 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1302 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1303 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1304 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1305 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1306 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1307 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1308 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1309 /*
1310     These routines are for partitioning matrices: currently used only
1311   for adjacency matrix, MatCreateMPIAdj().
1312 */
1313 
1314 /*S
1315      MatPartitioning - Object for managing the partitioning of a matrix or graph
1316 
1317    Level: beginner
1318 
1319   Concepts: partitioning
1320 
1321 .seealso:  MatPartitioningCreate(), MatPartitioningType
1322 S*/
1323 typedef struct _p_MatPartitioning* MatPartitioning;
1324 
1325 /*E
1326     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1327        with an optional dynamic library name, for example
1328        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1329 
1330    Level: beginner
1331 
1332 .seealso: MatPartitioningCreate(), MatPartitioning
1333 E*/
1334 #define MatPartitioningType char*
1335 #define MATPARTITIONINGCURRENT  "current"
1336 #define MATPARTITIONINGSQUARE   "square"
1337 #define MATPARTITIONINGPARMETIS "parmetis"
1338 #define MATPARTITIONINGCHACO    "chaco"
1339 #define MATPARTITIONINGJOSTLE   "jostle"
1340 #define MATPARTITIONINGPARTY    "party"
1341 #define MATPARTITIONINGSCOTCH   "scotch"
1342 
1343 
1344 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1345 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1346 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1347 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1348 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1349 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1350 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1351 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1352 
1353 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1354 
1355 /*MC
1356    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1357    matrix package.
1358 
1359    Synopsis:
1360    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1361 
1362    Not Collective
1363 
1364    Input Parameters:
1365 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1366 .  path - location of library where creation routine is
1367 .  name - name of function that creates the partitioning type, a string
1368 -  function - function pointer that creates the partitioning type
1369 
1370    Level: developer
1371 
1372    If dynamic libraries are used, then the fourth input argument (function)
1373    is ignored.
1374 
1375    Sample usage:
1376 .vb
1377    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1378                "MyPartCreate",MyPartCreate);
1379 .ve
1380 
1381    Then, your partitioner can be chosen with the procedural interface via
1382 $     MatPartitioningSetType(part,"my_part")
1383    or at runtime via the option
1384 $     -mat_partitioning_type my_part
1385 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1387 
1388 .keywords: matrix, partitioning, register
1389 
1390 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1391 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* with dynamic libraries the creation routine is looked up at runtime by name (arguments b,c),
   so the compile-time function pointer d is dropped (see the manual page above) */
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1397 
1398 extern PetscBool  MatPartitioningRegisterAllCalled;
1399 
1400 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1401 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1402 
1403 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1404 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1405 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1406 
1407 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1408 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1409 
1410 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1411 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1412 
/* Options for the Chaco partitioner (MATPARTITIONINGCHACO) */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;  /* global partitioning method */
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;  /* local refinement method */
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;  /* eigensolver used by spectral partitioning -- see Chaco documentation */
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1422 
1423 #define MP_PARTY_OPT "opt"
1424 #define MP_PARTY_LIN "lin"
1425 #define MP_PARTY_SCA "sca"
1426 #define MP_PARTY_RAN "ran"
1427 #define MP_PARTY_GBF "gbf"
1428 #define MP_PARTY_GCF "gcf"
1429 #define MP_PARTY_BUB "bub"
1430 #define MP_PARTY_DEF "def"
1431 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1432 #define MP_PARTY_HELPFUL_SETS "hs"
1433 #define MP_PARTY_KERNIGHAN_LIN "kl"
1434 #define MP_PARTY_NONE "no"
1435 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1436 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1437 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1438 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1439 
/* Options for the Scotch partitioner (MATPARTITIONINGSCOTCH) */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;  /* global partitioning method */
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;  /* local refinement method */
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);  /* NOTE(review): takes non-const char* unlike the other string setters -- confirm whether the implementation modifies the string */
1450 
1451 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1452 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1453 
1454 /*
1455     If you add entries here you must also add them to finclude/petscmat.h
1456 */
/* Indices of the slots in a Mat's function table; query availability with MatHasOperation(),
   and install/retrieve implementations on shell matrices with MatShellSetOperation()/MatShellGetOperation().
   The explicit values are part of the (Fortran-visible) interface -- keep in sync with
   include/finclude/petscmat.h and never renumber existing entries. */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               MATOP_GETCOLUMNNORMS=125,           /* slot 124 intentionally unused */
               MATOP_GET_SUBMATRICES_PARALLEL=128  /* slots 126-127 intentionally unused */
             } MatOperation;
1584 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1585 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1586 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1587 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1588 
1589 /*
1590    Codes for matrices stored on disk. By default they are
1591    stored in a universal format. By changing the format with
1592    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1593    be stored in a way natural for the matrix, for example dense matrices
1594    would be stored as dense. Matrices stored this way may only be
1595    read into matrices of the same type.
1596 */
1597 #define MATRIX_BINARY_FORMAT_DENSE -1
1598 
1599 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1600 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1601 
1602 /*S
1603      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector against a subspace
1605 
1606    Level: advanced
1607 
1608   Concepts: matrix; linear operator, null space
1609 
1610   Users manual sections:
1611 .   sec_singular
1612 
1613 .seealso:  MatNullSpaceCreate()
1614 S*/
1615 typedef struct _p_MatNullSpace* MatNullSpace;
1616 
1617 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1618 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1619 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1620 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1621 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1622 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1623 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1624 
1625 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1626 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1627 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1628 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1629 
1630 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1631 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1632 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1633 
1634 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1635 
1636 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1637 
1638 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1639 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1640 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1641 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1642 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1643 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1644 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1645 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1646 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1647 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1648 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1649 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1650 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1651 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1652 
1653 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1655               Jacobian vector products
1656 
1657     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1658 
1659            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1660 
1661     Level: developer
1662 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1664 S*/
1665 typedef struct _p_MatMFFD* MatMFFD;
1666 
1667 /*E
1668     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1669 
1670    Level: beginner
1671 
1672 .seealso: MatMFFDSetType(), MatMFFDRegister()
1673 E*/
1674 #define MatMFFDType char*
1675 #define MATMFFD_DS  "ds"
1676 #define MATMFFD_WP  "wp"
1677 
1678 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1679 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1680 
1681 /*MC
1682    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1683 
1684    Synopsis:
1685    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1686 
1687    Not Collective
1688 
1689    Input Parameters:
1690 +  name_solver - name of a new user-defined compute-h module
1691 .  path - path (either absolute or relative) the library containing this solver
1692 .  name_create - name of routine to create method context
1693 -  routine_create - routine to create method context
1694 
1695    Level: developer
1696 
1697    Notes:
1698    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1699 
1700    If dynamic libraries are used, then the fourth input argument (routine_create)
1701    is ignored.
1702 
1703    Sample usage:
1704 .vb
1705    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1706                "MyHCreate",MyHCreate);
1707 .ve
1708 
1709    Then, your solver can be chosen with the procedural interface via
1710 $     MatMFFDSetType(mfctx,"my_h")
1711    or at runtime via the option
1712 $     -snes_mf_type my_h
1713 
1714 .keywords: MatMFFD, register
1715 
1716 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1717 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* with dynamic libraries the creation routine is looked up at runtime by name (arguments b,c),
   so the compile-time function pointer d is dropped (see the manual page above) */
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif
1723 
1724 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1725 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1726 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1727 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1728 
1729 
1730 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1731 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1732 
1733 /*
1734    PETSc interface to MUMPS
1735 */
1736 #ifdef PETSC_HAVE_MUMPS
1737 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1738 #endif
1739 
1740 /*
1741    PETSc interface to SUPERLU
1742 */
1743 #ifdef PETSC_HAVE_SUPERLU
1744 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1745 #endif
1746 
1747 #if defined(PETSC_HAVE_CUSP)
1748 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1749 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1750 #endif
1751 
1752 /*
1753    PETSc interface to FFTW
1754 */
1755 #if defined(PETSC_HAVE_FFTW)
1756 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1757 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1758 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1759 #endif
1760 
1761 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1762 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1763 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1764 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1765 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1766 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1767 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1768 
1769 PETSC_EXTERN_CXX_END
1770 #endif
1771