xref: /petsc/include/petscmat.h (revision 6ce1633cb736e3bd2a11b0bc146401a5bd4cb96c)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat; /* opaque handle; struct _p_Mat is not defined in this public header */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
/* Built-in matrix type names, passed to MatSetType().
   NOTE(review): the SEQ/MPI prefixes appear to distinguish sequential vs
   MPI-parallel variants of a storage format, with the unprefixed name
   (e.g. MATAIJ) as the generic form -- confirm against MatSetType() docs. */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ           "seqmaij"
33 #define MATMPIMAIJ           "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ            "seqaij"
37 #define MATMPIAIJ            "mpiaij"
38 #define MATAIJCRL              "aijcrl"
39 #define MATSEQAIJCRL             "seqaijcrl"
40 #define MATMPIAIJCRL             "mpiaijcrl"
41 #define MATAIJCUDA             "aijcuda"
42 #define MATSEQAIJCUDA            "seqaijcuda"
43 #define MATMPIAIJCUDA            "mpiaijcuda"
44 #define MATAIJPERM             "aijperm"
45 #define MATSEQAIJPERM            "seqaijperm"
46 #define MATMPIAIJPERM            "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE          "seqdense"
50 #define MATMPIDENSE          "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ           "seqbaij"
53 #define MATMPIBAIJ           "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ          "seqsbaij"
57 #define MATMPISBAIJ          "mpisbaij"
58 #define MATDAAD            "daad"
59 #define MATMFFD            "mffd"
60 #define MATNORMAL          "normal"
61 #define MATLRC             "lrc"
62 #define MATSCATTER         "scatter"
63 #define MATBLOCKMAT        "blockmat"
64 #define MATCOMPOSITE       "composite"
65 #define MATSEQFFTW         "seqfftw"
66 #define MATSEQCUFFT        "seqcufft"
67 #define MATTRANSPOSEMAT    "transpose"
68 #define MATSCHURCOMPLEMENT "schurcomplement"
69 #define MATPYTHON          "python"
70 #define MATHYPRESTRUCT     "hyprestruct"
71 #define MATHYPRESSTRUCT    "hypresstruct"
72 #define MATSUBMATRIX       "submatrix"
73 #define MATLOCALREF        "localref"
74 #define MATNEST            "nest"
75 
76 /*E
77     MatSolverPackage - String with the name of a PETSc matrix solver type.
78 
79     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
80        SuperLU or SuperLU_Dist etc.
81 
82 
83    Level: beginner
84 
85 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
86 E*/
87 #define MatSolverPackage char*
/* Names of factorization packages usable with MatGetFactor(): "petsc"
   selects PETSc's built-in factorizations, the others name external
   solver packages (see the MatSolverPackage manual text above). */
88 #define MATSOLVERSPOOLES      "spooles"
89 #define MATSOLVERSUPERLU      "superlu"
90 #define MATSOLVERSUPERLU_DIST "superlu_dist"
91 #define MATSOLVERUMFPACK      "umfpack"
92 #define MATSOLVERCHOLMOD      "cholmod"
93 #define MATSOLVERESSL         "essl"
94 #define MATSOLVERLUSOL        "lusol"
95 #define MATSOLVERMUMPS        "mumps"
96 #define MATSOLVERPASTIX       "pastix"
97 #define MATSOLVERDSCPACK      "dscpack"
98 #define MATSOLVERMATLAB       "matlab"
99 #define MATSOLVERPETSC        "petsc"
100 #define MATSOLVERPLAPACK      "plapack"
101 #define MATSOLVERBAS          "bas"
102 
103 /*E
104     MatFactorType - indicates what type of factorization is requested
105 
106     Level: beginner
107 
108    Any additions/changes here MUST also be made in include/finclude/petscmat.h
109 
110 .seealso: MatSolverPackage, MatGetFactor()
111 E*/
112 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; /* requested factorization kind for MatGetFactor() (see manual text above) */
113 extern const char *const MatFactorTypes[]; /* NOTE(review): presumably printable names indexed by MatFactorType -- confirm */
114 
115 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
116 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
117 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
118 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
119 
120 /* Logging support */
121 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
122 extern PetscClassId  MAT_CLASSID;
123 extern PetscClassId  MAT_FDCOLORING_CLASSID;
124 extern PetscClassId  MAT_PARTITIONING_CLASSID;
125 extern PetscClassId  MAT_NULLSPACE_CLASSID;
126 extern PetscClassId  MATMFFD_CLASSID;
127 
128 /*E
129     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
130      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
131      that the input matrix is to be replaced with the converted matrix.
132 
133     Level: beginner
134 
135    Any additions/changes here MUST also be made in include/finclude/petscmat.h
136 
137 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
138 E*/
139 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; /* INITIAL: create a new matrix; REUSE: reuse one returned by a previous call (see manual text above) */
140 
141 /*E
142     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
143      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
144 
145     Level: beginner
146 
147 .seealso: MatGetSeqNonzeroStructure()
148 E*/
149 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; /* whether extracted matrices include the numerical values or only the nonzero structure (see manual text above) */
150 
151 extern PetscErrorCode  MatInitializePackage(const char[]);
152 
153 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
154 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
155 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
156 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
157 extern PetscErrorCode  MatSetType(Mat,const MatType);
158 extern PetscErrorCode  MatSetFromOptions(Mat);
159 extern PetscErrorCode  MatSetUpPreallocation(Mat);
160 extern PetscErrorCode  MatRegisterAll(const char[]);
161 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
162 
163 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
164 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
165 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
166 
167 /*MC
168    MatRegisterDynamic - Adds a new matrix type
169 
170    Synopsis:
171    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
172 
173    Not Collective
174 
175    Input Parameters:
176 +  name - name of a new user-defined matrix type
177 .  path - path (either absolute or relative) to the library containing this solver
178 .  name_create - name of routine to create method context
179 -  routine_create - routine to create method context
180 
181    Notes:
182    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
183 
184    If dynamic libraries are used, then the fourth input argument (routine_create)
185    is ignored.
186 
187    Sample usage:
188 .vb
189    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
190                "MyMatCreate",MyMatCreate);
191 .ve
192 
193    Then, your solver can be chosen with the procedural interface via
194 $     MatSetType(Mat,"my_mat")
195    or at runtime via the option
196 $     -mat_type my_mat
197 
198    Level: advanced
199 
200    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
201          If your function is not being put into a shared library then use MatRegister() instead
202 
203 .keywords: Mat, register
204 
205 .seealso: MatRegisterAll(), MatRegisterDestroy()
206 
207 M*/
208 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the function pointer (argument d) is dropped:
   per the manual text above, the creation routine is located by name
   in the shared library at runtime. */
209 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
210 #else
211 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
212 #endif
213 
214 extern PetscBool  MatRegisterAllCalled;
215 extern PetscFList MatList;
216 extern PetscFList MatColoringList;
217 extern PetscFList MatPartitioningList;
218 
219 /*E
220     MatStructure - Indicates if the matrix has the same nonzero structure
221 
222     Level: beginner
223 
224    Any additions/changes here MUST also be made in include/finclude/petscmat.h
225 
226 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
227 E*/
228 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure; /* relationship between the nonzero structures of two matrices (see manual text above) */
229 
230 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
231 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
232 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
233 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
234 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
235 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
236 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
237 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
238 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
239 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
240 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
241 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
242 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
243 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
245 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
246 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
247 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
248 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
252 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
253 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
255 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
256 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
257 
258 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
259 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
260 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
264 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
265 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
266 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
267 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
268 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
269 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
271 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
272 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
273 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
274 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
278 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
279 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
281 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
282 
283 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
284 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
285 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
286 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
290 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
291 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
292 
293 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
294 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
295 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
296 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
298 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
299 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
300 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
301 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
302 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
305 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
306 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
308 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
309 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
310 
311 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
312 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
313 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
314 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
315 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
316 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
317 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
318 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
319 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
320 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
321 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
322 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
323 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
324 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
325 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
326 extern PetscErrorCode  MatCompositeMerge(Mat);
327 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
328 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; /* how a MATCOMPOSITE combines its member matrices; set with MatCompositeSetType(). NOTE(review): presumably sum vs product of the members -- confirm */
329 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
330 
331 extern PetscErrorCode  MatCreateSeqFFTW(MPI_Comm,PetscInt,const PetscInt[],Mat*);
332 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
334 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
335 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
336 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
337 
338 extern PetscErrorCode  MatCreatePython(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const char[],Mat*);
339 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
340 
341 
342 extern PetscErrorCode  MatSetUp(Mat);
343 extern PetscErrorCode  MatDestroy(Mat);
344 
345 extern PetscErrorCode  MatConjugate(Mat);
346 extern PetscErrorCode  MatRealPart(Mat);
347 extern PetscErrorCode  MatImaginaryPart(Mat);
348 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
349 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
350 
351 /* ------------------------------------------------------------*/
352 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
353 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
354 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
355 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
356 
357 /*S
358      MatStencil - Data structure (C struct) for storing information about a single row or
359         column of a matrix as index on an associated grid.
360 
361    Level: beginner
362 
363   Concepts: matrix; linear operator
364 
365 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
366 S*/
367 typedef struct {
368   PetscInt k,j,i,c; /* grid index of a row/column (see manual text above); NOTE(review): k,j,i look like the third/second/first grid directions and c the component at that grid point -- confirm against MatSetValuesStencil() docs */
369 } MatStencil;
370 
371 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
374 
375 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
376 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
377 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
378 
379 /*E
380     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
381      to continue to add values to it
382 
383     Level: beginner
384 
385 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
386 E*/
387 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* NOTE(review): values are pinned explicitly (FLUSH=1, FINAL=0); do not renumber without checking include/finclude/petscmat.h */
388 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
389 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
390 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
391 
392 
393 
394 /*E
395     MatOption - Options that may be set for a matrix and its behavior or storage
396 
397     Level: beginner
398 
399    Any additions/changes here MUST also be made in include/finclude/petscmat.h
400 
401 .seealso: MatSetOption()
402 E*/
403 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
404               MAT_SYMMETRIC,
405               MAT_STRUCTURALLY_SYMMETRIC,
406               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
407               MAT_NEW_NONZERO_LOCATION_ERR,
408               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
409               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
410               MAT_USE_INODES,
411               MAT_HERMITIAN,
412               MAT_SYMMETRY_ETERNAL,
413               MAT_CHECK_COMPRESSED_ROW,
414               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
415               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
416               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
417               NUM_MAT_OPTIONS} MatOption; /* NUM_MAT_OPTIONS is a sentinel and must remain last */
418 extern const char *MatOptions[]; /* NOTE(review): presumably printable option names indexed by MatOption -- confirm */
419 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
420 extern PetscErrorCode  MatGetType(Mat,const MatType*);
421 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
422 
423 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
424 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
425 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
426 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
427 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
428 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
429 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
430 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
431 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
432 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
433 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
434 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
435 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
436 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
437 
438 
439 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
440 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
441 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
442 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
443 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
444 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
445 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
446 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
447 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
448 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
449 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
450 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
451 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
452 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
453 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
454 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
455 
456 /*E
457     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
458   its numerical values copied over or just its nonzero structure.
459 
460     Level: beginner
461 
462    Any additions/changes here MUST also be made in include/finclude/petscmat.h
463 
464 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
465 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
466 $                               have several matrices with the same nonzero pattern.
467 
468 .seealso: MatDuplicate()
469 E*/
470 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; /* copy structure only / structure and values / share the i,j arrays with the original, which also implies MAT_DO_NOT_COPY_VALUES (see manual text above) */
471 
472 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
473 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
474 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
475 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
476 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
477 
478 
479 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
480 extern PetscErrorCode  MatView(Mat,PetscViewer);
481 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
482 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
483 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
484 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
485 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
486 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
487 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
488 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
489 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
490 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
491 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
492 
493 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
494 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
495 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
496 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
497 
498 /*S
499      MatInfo - Context of matrix information, used with MatGetInfo()
500 
501    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
502 
503    Level: intermediate
504 
505   Concepts: matrix^nonzero information
506 
507 .seealso:  MatGetInfo(), MatInfoType
508 S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros allocated, used, and allocated-but-unneeded */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
518 
519 /*E
520     MatInfoType - Indicates if you want information about the local part of the matrix,
521      the entire parallel matrix or the maximum over all the local parts.
522 
523     Level: beginner
524 
525    Any additions/changes here MUST also be made in include/finclude/petscmat.h
526 
527 .seealso: MatGetInfo(), MatInfo
528 E*/
529 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
530 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
531 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
532 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
533 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
534 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
535 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
536 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
537 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
538 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
539 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
540 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
541 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
542 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
543 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
544 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
545 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
546 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
547 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
548 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
549 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
550 
551 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
552 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
553 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
554 extern PetscErrorCode  MatZeroEntries(Mat);
555 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
556 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
557 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
558 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
559 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
560 
561 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
562 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
563 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
564 
565 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
566 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
567 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
568 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
569 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
570 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
571 
572 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
573 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
574 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
575 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
576 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
577 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
578 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
579 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
580 
581 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
582 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
583 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
584 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
585 extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
586 extern PetscErrorCode  MatGetLocalMat(Mat,MatReuse,Mat*);
587 extern PetscErrorCode  MatGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
588 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
589 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
590 #if defined (PETSC_USE_CTABLE)
591 #include "petscctable.h"
592 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
593 #else
594 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
595 #endif
596 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
597 
598 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
599 
600 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
601 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
602 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
603 
604 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
605 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
606 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
607 
608 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
609 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
610 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
611 
612 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
613 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
614 
615 extern PetscErrorCode  MatScale(Mat,PetscScalar);
616 extern PetscErrorCode  MatShift(Mat,PetscScalar);
617 
618 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
619 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
620 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
621 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
622 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
623 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
624 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
625 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
626 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
627 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
628 
629 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
630 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
631 
632 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
633 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
634 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
635 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
636 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
637 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
638 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
639 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
640 
641 /*MC
642    MatSetValue - Set a single entry into a matrix.
643 
644    Not collective
645 
646    Input Parameters:
647 +  m - the matrix
648 .  row - the row location of the entry
649 .  col - the column location of the entry
650 .  value - the value to insert
651 -  mode - either INSERT_VALUES or ADD_VALUES
652 
653    Notes:
654    For efficiency one should use MatSetValues() and set several or many
655    values simultaneously if possible.
656 
657    Level: beginner
658 
659 .seealso: MatSetValues(), MatSetValueLocal()
660 M*/
661 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
662 
663 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
664 
665 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
666 
667 /*MC
668    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
669        row in a matrix providing the data that one can use to correctly preallocate the matrix.
670 
671    Synopsis:
672    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
673 
674    Collective on MPI_Comm
675 
676    Input Parameters:
677 +  comm - the communicator that will share the eventually allocated matrix
678 .  nrows - the number of LOCAL rows in the matrix
679 -  ncols - the number of LOCAL columns in the matrix
680 
681    Output Parameters:
682 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
684 
685 
686    Level: intermediate
687 
688    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
690 
691    Do not malloc or free dnz and onz, that is handled internally by these routines
692 
693    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
694 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
696 
697   Concepts: preallocation^Matrix
698 
699 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
700           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
701 M*/
/* Expands to '0;' (so a preceding 'ierr =' assigns success) followed by an opening '{'
   that MatPreallocateFinalize() later closes.  The variables declared here
   (_4_ierr, __nrows, __ctmp, __rstart, __start, __end) are referenced by the other
   MatPreallocate... macros, which therefore must appear inside this scope.
   The two MPI_Scan calls compute this process's starting global column (__start,
   with __end one past the last local column) and starting global row (__rstart). */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
710 
711 /*MC
712    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
713        row in a matrix providing the data that one can use to correctly preallocate the matrix.
714 
715    Synopsis:
716    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
717 
718    Collective on MPI_Comm
719 
720    Input Parameters:
721 +  comm - the communicator that will share the eventually allocated matrix
722 .  nrows - the number of LOCAL rows in the matrix
723 -  ncols - the number of LOCAL columns in the matrix
724 
725    Output Parameters:
726 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
728 
729 
730    Level: intermediate
731 
732    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
734 
735    Do not malloc or free dnz and onz, that is handled internally by these routines
736 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
738 
739   Concepts: preallocation^Matrix
740 
741 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
742           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
743 M*/
/* Like MatPreallocateInitialize(): expands to '0;' followed by an opening '{' that
   MatPreallocateFinalize() closes.  Unlike the non-symmetric version, no __start
   variable is declared or computed, because MatPreallocateSymmetricSet() only
   compares columns against __end (columns below the diagonal are not counted). */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
752 
753 /*MC
754    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
755        inserted using a local number of the rows and columns
756 
757    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
759 
760    Not Collective
761 
762    Input Parameters:
763 +  map - the row mapping from local numbering to global numbering
764 .  nrows - the number of rows indicated
765 .  rows - the indices of the rows
766 .  cmap - the column mapping from local to global numbering
767 .  ncols - the number of columns in the matrix
768 .  cols - the columns indicated
769 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
771 
772 
773    Level: intermediate
774 
775    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
777 
778    Do not malloc or free dnz and onz, that is handled internally by these routines
779 
780   Concepts: preallocation^Matrix
781 
782 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
783           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
784 M*/
/* Maps local row/column indices to global numbering and counts them via
   MatPreallocateSet().  Note: ISLocalToGlobalMappingApply() is called with the
   same array as input and output, so the caller's rows[] and cols[] are
   overwritten in place with global indices.  Relies on _4_ierr and the other
   variables declared by MatPreallocateInitialize(). */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
794 
795 /*MC
796    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
797        inserted using a local number of the rows and columns
798 
799    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
801 
802    Not Collective
803 
804    Input Parameters:
805 +  map - the mapping between local numbering and global numbering
806 .  nrows - the number of rows indicated
807 .  rows - the indices of the rows
808 .  ncols - the number of columns in the matrix
809 .  cols - the columns indicated
810 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
812 
813 
814    Level: intermediate
815 
816    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
818 
819    Do not malloc or free dnz and onz that is handled internally by these routines
820 
821   Concepts: preallocation^Matrix
822 
823 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
824           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
825 M*/
/* Symmetric counterpart of MatPreallocateSetLocal(): a single mapping 'map' is used
   for both rows and columns, and counting is delegated to MatPreallocateSymmetricSet().
   As with MatPreallocateSetLocal(), the caller's rows[] and cols[] are overwritten
   in place with global indices.  Uses _4_ierr declared by the Initialize macro. */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
835 
836 /*MC
837    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted, given the global row number and global column numbers
839 
840    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
842 
843    Not Collective
844 
845    Input Parameters:
846 +  row - the row
847 .  ncols - the number of columns in the matrix
848 -  cols - the columns indicated
849 
850    Output Parameters:
851 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
853 
854 
855    Level: intermediate
856 
857    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
859 
860    Do not malloc or free dnz and onz that is handled internally by these routines
861 
862    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
863 
864   Concepts: preallocation^Matrix
865 
866 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
867           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
868 M*/
/* Counts columns cols[0..nc-1] of global row 'row' into dnz (column inside the
   local diagonal block [__start,__end)) or onz (outside it).  Validates that 'row'
   is local, i.e. within [__rstart,__rstart+__nrows).  Relies on the variables
   declared by MatPreallocateInitialize().
   Fix: every macro parameter is now parenthesized at its point of use (only 'cols'
   was before), so expressions such as 'r+1' can safely be passed as arguments. */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if ((row) < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",(row),__rstart);\
  if ((row) >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",(row),__rstart+__nrows-1);\
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else (dnz)[(row) - __rstart]++;\
  }\
}
878 
879 /*MC
880    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted, given the global row number and global column numbers
882 
883    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
885 
886    Not Collective
887 
888    Input Parameters:
+  row - the global row number
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
895 
896 
897    Level: intermediate
898 
899    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
901 
902    Do not malloc or free dnz and onz that is handled internally by these routines
903 
904    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
905 
906   Concepts: preallocation^Matrix
907 
908 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
909           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
910 M*/
/* Symmetric variant of MatPreallocateSet(): only columns at or past the diagonal are
   counted -- cols[__i] >= __end goes to onz, __end > cols[__i] >= row goes to dnz,
   and columns below the diagonal are ignored.  No row-range validation is performed
   here (unlike MatPreallocateSet()).  Relies on the variables declared by
   MatPreallocateSymmetricInitialize().
   Fix: macro parameters are parenthesized at each use, matching the convention of
   MatPreallocateSet() (which already wrote '(cols)[__i]') and making the macro safe
   for expression arguments. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else if ((cols)[__i] >= (row)) (dnz)[(row) - __rstart]++;\
  }\
}
918 
919 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
921 
922    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
924 
925    Not Collective
926 
927    Input Parameters:
928 .  A - matrix
929 .  row - row where values exist (must be local to this process)
930 .  ncols - number of columns
931 .  cols - columns with nonzeros
932 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
934 
935 
936    Level: intermediate
937 
938    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
940 
941    Do not malloc or free dnz and onz that is handled internally by these routines
942 
943    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
944 
945   Concepts: preallocation^Matrix
946 
947 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
948           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
949 M*/
950 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
951 
952 
953 /*MC
954    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
955        row in a matrix providing the data that one can use to correctly preallocate the matrix.
956 
957    Synopsis:
958    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
959 
960    Collective on MPI_Comm
961 
962    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
965 
966 
967    Level: intermediate
968 
969    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
971 
972    Do not malloc or free dnz and onz that is handled internally by these routines
973 
974    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
975 
976   Concepts: preallocation^Matrix
977 
978 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
979           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
980 M*/
981 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
982 
983 
984 
985 /* Routines unique to particular data structures */
986 extern PetscErrorCode  MatShellGetContext(Mat,void **);
987 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
988 
989 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
990 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
991 
992 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
993 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
994 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
995 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
996 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
997 
998 #define MAT_SKIP_ALLOCATION -4
999 
1000 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1001 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1002 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1003 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1004 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1005 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1006 
1007 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1008 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1009 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1010 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1011 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1012 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1013 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1014 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1015 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1016 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1017 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1018 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1019 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1020 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1021 
1022 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1023 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1024 
1025 extern PetscErrorCode  MatStoreValues(Mat);
1026 extern PetscErrorCode  MatRetrieveValues(Mat);
1027 
1028 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1029 
1030 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1031 /*
1032   These routines are not usually accessed directly, rather solving is
1033   done through the KSP and PC interfaces.
1034 */
1035 
1036 /*E
1037     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1038        with an optional dynamic library name, for example
1039        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1040 
1041    Level: beginner
1042 
1043    Cannot use const because the PC objects manipulate the string
1044 
1045 .seealso: MatGetOrdering()
1046 E*/
1047 #define MatOrderingType char*
1048 #define MATORDERINGNATURAL     "natural"
1049 #define MATORDERINGND          "nd"
1050 #define MATORDERING1WD         "1wd"
1051 #define MATORDERINGRCM         "rcm"
1052 #define MATORDERINGQMD         "qmd"
1053 #define MATORDERINGROWLENGTH   "rowlength"
1054 #define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
1055 #define MATORDERINGDSC_MMD     "dsc_mmd"
1056 #define MATORDERINGDSC_MDF     "dsc_mdf"
1057 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1058 
1059 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1060 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1061 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1062 
1063 /*MC
1064    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1065 
1066    Synopsis:
1067    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1068 
1069    Not Collective
1070 
1071    Input Parameters:
1072 +  sname - name of ordering (for example MATORDERINGND)
1073 .  path - location of library where creation routine is
1074 .  name - name of function that creates the ordering type,a string
1075 -  function - function pointer that creates the ordering
1076 
1077    Level: developer
1078 
1079    If dynamic libraries are used, then the fourth input argument (function)
1080    is ignored.
1081 
1082    Sample usage:
1083 .vb
1084    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1085                "MyOrder",MyOrder);
1086 .ve
1087 
1088    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1090    or at runtime via the option
1091 $     -pc_factor_mat_ordering_type my_order
1092 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1094 
1095 .keywords: matrix, ordering, register
1096 
1097 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1098 M*/
1099 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1100 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
1101 #else
1102 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1103 #endif
1104 
1105 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1106 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1107 extern PetscBool  MatOrderingRegisterAllCalled;
1108 extern PetscFList MatOrderingList;
1109 
1110 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1111 
1112 /*S
1113     MatFactorShiftType - Numeric Shift.
1114 
1115    Level: beginner
1116 
1117 S*/
1118 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1119 extern const char *MatFactorShiftTypes[];
1120 
1121 /*S
1122    MatFactorInfo - Data passed into the matrix factorization routines
1123 
1124    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1125 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1126 
1127    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1128 
1129       You can use MatFactorInfoInitialize() to set default values.
1130 
1131    Level: developer
1132 
1133 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1134           MatFactorInfoInitialize()
1135 
1136 S*/
/* All fields are PetscReal so that, from Fortran, the struct can be treated as an
   array of MAT_FACTORINFO_SIZE double precision values (see the manual page above). */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero: use drop-tolerance (dt/dtcol/dtcount) factorization -- TODO(review) confirm exact trigger */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots; holds a MatFactorShiftType value */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1151 
1152 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1153 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1154 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1155 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1156 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1157 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1158 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1159 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1160 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1161 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1162 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1163 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1164 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1165 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1166 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1167 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1168 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1169 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1170 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1171 
1172 extern PetscErrorCode  MatSetUnfactored(Mat);
1173 
1174 /*E
1175     MatSORType - What type of (S)SOR to perform
1176 
1177     Level: beginner
1178 
1179    May be bitwise ORd together
1180 
1181    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1182 
1183    MatSORType may be bitwise ORd together, so do not change the numbers
1184 
1185 .seealso: MatSOR()
1186 E*/
/* Values are chosen so flags can be bitwise ORed: SOR_SYMMETRIC_SWEEP == FORWARD|BACKWARD (1|2),
   SOR_LOCAL_SYMMETRIC_SWEEP == LOCAL_FORWARD|LOCAL_BACKWARD (4|8); do not renumber. */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1192 
1193 /*
1194     These routines are for efficiently computing Jacobians via finite differences.
1195 */
1196 
1197 /*E
1198     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1199        with an optional dynamic library name, for example
1200        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1201 
1202    Level: beginner
1203 
1204 .seealso: MatGetColoring()
1205 E*/
1206 #define MatColoringType char*
1207 #define MATCOLORINGNATURAL "natural"
1208 #define MATCOLORINGSL      "sl"
1209 #define MATCOLORINGLF      "lf"
1210 #define MATCOLORINGID      "id"
1211 
1212 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1213 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1214 
1215 /*MC
1216    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1217                                matrix package.
1218 
1219    Synopsis:
   PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,MatColoringType,ISColoring *))
1221 
1222    Not Collective
1223 
1224    Input Parameters:
1225 +  sname - name of Coloring (for example MATCOLORINGSL)
1226 .  path - location of library where creation routine is
1227 .  name - name of function that creates the Coloring type, a string
1228 -  function - function pointer that creates the coloring
1229 
1230    Level: developer
1231 
1232    If dynamic libraries are used, then the fourth input argument (function)
1233    is ignored.
1234 
1235    Sample usage:
1236 .vb
1237    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1238                "MyColor",MyColor);
1239 .ve
1240 
   Then, your coloring routine can be chosen with the procedural interface via
1242 $     MatColoringSetType(part,"my_color")
1243    or at runtime via the option
1244 $     -mat_coloring_type my_color
1245 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1247 
1248 .keywords: matrix, Coloring, register
1249 
1250 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1251 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is loaded by name from the library path,
   so the function pointer argument (d) is dropped (see the manual page above). */
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1257 
1258 extern PetscBool  MatColoringRegisterAllCalled;
1259 
1260 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1261 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1262 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1263 
1264 /*S
1265      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1266         and coloring
1267 
1268    Level: beginner
1269 
1270   Concepts: coloring, sparse Jacobian, finite differences
1271 
1272 .seealso:  MatFDColoringCreate()
1273 S*/
1274 typedef struct _p_MatFDColoring* MatFDColoring;
1275 
1276 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1277 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring);
1278 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1279 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1280 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1281 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1282 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1283 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1284 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1285 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1286 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1287 /*
1288     These routines are for partitioning matrices: currently used only
1289   for adjacency matrix, MatCreateMPIAdj().
1290 */
1291 
1292 /*S
1293      MatPartitioning - Object for managing the partitioning of a matrix or graph
1294 
1295    Level: beginner
1296 
1297   Concepts: partitioning
1298 
1299 .seealso:  MatPartitioningCreate(), MatPartitioningType
1300 S*/
1301 typedef struct _p_MatPartitioning* MatPartitioning;
1302 
1303 /*E
1304     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1305        with an optional dynamic library name, for example
1306        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1307 
1308    Level: beginner
1309 
1310 .seealso: MatPartitioningCreate(), MatPartitioning
1311 E*/
1312 #define MatPartitioningType char*
1313 #define MATPARTITIONINGCURRENT  "current"
1314 #define MATPARTITIONINGSQUARE   "square"
1315 #define MATPARTITIONINGPARMETIS "parmetis"
1316 #define MATPARTITIONINGCHACO    "chaco"
1317 #define MATPARTITIONINGJOSTLE   "jostle"
1318 #define MATPARTITIONINGPARTY    "party"
1319 #define MATPARTITIONINGSCOTCH   "scotch"
1320 
1321 
1322 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1323 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1324 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1325 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1326 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1327 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1328 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1329 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning);
1330 
1331 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1332 
1333 /*MC
1334    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1335    matrix package.
1336 
1337    Synopsis:
1338    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1339 
1340    Not Collective
1341 
1342    Input Parameters:
1343 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1344 .  path - location of library where creation routine is
1345 .  name - name of function that creates the partitioning type, a string
1346 -  function - function pointer that creates the partitioning type
1347 
1348    Level: developer
1349 
1350    If dynamic libraries are used, then the fourth input argument (function)
1351    is ignored.
1352 
1353    Sample usage:
1354 .vb
1355    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1356                "MyPartCreate",MyPartCreate);
1357 .ve
1358 
1359    Then, your partitioner can be chosen with the procedural interface via
1360 $     MatPartitioningSetType(part,"my_part")
1361    or at runtime via the option
1362 $     -mat_partitioning_type my_part
1363 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1365 
1366 .keywords: matrix, partitioning, register
1367 
1368 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1369 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is loaded by name from the library path,
   so the function pointer argument (d) is dropped (see the manual page above). */
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1375 
1376 extern PetscBool  MatPartitioningRegisterAllCalled;
1377 
1378 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1379 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1380 
1381 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1382 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1383 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1384 
1385 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1386 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1387 
1388 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1389 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1390 
/* Chaco partitioner: global partitioning strategy choices */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
/* Chaco partitioner: local refinement strategy choices */
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
/* Chaco partitioner: eigensolver choices (presumably for the spectral method -- TODO confirm) */
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1400 
1401 #define MP_PARTY_OPT "opt"
1402 #define MP_PARTY_LIN "lin"
1403 #define MP_PARTY_SCA "sca"
1404 #define MP_PARTY_RAN "ran"
1405 #define MP_PARTY_GBF "gbf"
1406 #define MP_PARTY_GCF "gcf"
1407 #define MP_PARTY_BUB "bub"
1408 #define MP_PARTY_DEF "def"
1409 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1410 #define MP_PARTY_HELPFUL_SETS "hs"
1411 #define MP_PARTY_KERNIGHAN_LIN "kl"
1412 #define MP_PARTY_NONE "no"
1413 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1414 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1415 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1416 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1417 
/* Scotch partitioner: global strategy choices */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
/* Scotch partitioner: local refinement strategy choices */
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1428 
1429 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1430 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1431 
1432 /*
1433     If you add entries here you must also add them to finclude/petscmat.h
1434 */
1435 typedef enum { MATOP_SET_VALUES=0,
1436                MATOP_GET_ROW=1,
1437                MATOP_RESTORE_ROW=2,
1438                MATOP_MULT=3,
1439                MATOP_MULT_ADD=4,
1440                MATOP_MULT_TRANSPOSE=5,
1441                MATOP_MULT_TRANSPOSE_ADD=6,
1442                MATOP_SOLVE=7,
1443                MATOP_SOLVE_ADD=8,
1444                MATOP_SOLVE_TRANSPOSE=9,
1445                MATOP_SOLVE_TRANSPOSE_ADD=10,
1446                MATOP_LUFACTOR=11,
1447                MATOP_CHOLESKYFACTOR=12,
1448                MATOP_SOR=13,
1449                MATOP_TRANSPOSE=14,
1450                MATOP_GETINFO=15,
1451                MATOP_EQUAL=16,
1452                MATOP_GET_DIAGONAL=17,
1453                MATOP_DIAGONAL_SCALE=18,
1454                MATOP_NORM=19,
1455                MATOP_ASSEMBLY_BEGIN=20,
1456                MATOP_ASSEMBLY_END=21,
1457                MATOP_SET_OPTION=22,
1458                MATOP_ZERO_ENTRIES=23,
1459                MATOP_ZERO_ROWS=24,
1460                MATOP_LUFACTOR_SYMBOLIC=25,
1461                MATOP_LUFACTOR_NUMERIC=26,
1462                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1463                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1464                MATOP_SETUP_PREALLOCATION=29,
1465                MATOP_ILUFACTOR_SYMBOLIC=30,
1466                MATOP_ICCFACTOR_SYMBOLIC=31,
1467                MATOP_GET_ARRAY=32,
1468                MATOP_RESTORE_ARRAY=33,
1469                MATOP_DUPLICATE=34,
1470                MATOP_FORWARD_SOLVE=35,
1471                MATOP_BACKWARD_SOLVE=36,
1472                MATOP_ILUFACTOR=37,
1473                MATOP_ICCFACTOR=38,
1474                MATOP_AXPY=39,
1475                MATOP_GET_SUBMATRICES=40,
1476                MATOP_INCREASE_OVERLAP=41,
1477                MATOP_GET_VALUES=42,
1478                MATOP_COPY=43,
1479                MATOP_GET_ROW_MAX=44,
1480                MATOP_SCALE=45,
1481                MATOP_SHIFT=46,
1482                MATOP_DIAGONAL_SET=47,
1483                MATOP_ILUDT_FACTOR=48,
1484                MATOP_SET_BLOCK_SIZE=49,
1485                MATOP_GET_ROW_IJ=50,
1486                MATOP_RESTORE_ROW_IJ=51,
1487                MATOP_GET_COLUMN_IJ=52,
1488                MATOP_RESTORE_COLUMN_IJ=53,
1489                MATOP_FDCOLORING_CREATE=54,
1490                MATOP_COLORING_PATCH=55,
1491                MATOP_SET_UNFACTORED=56,
1492                MATOP_PERMUTE=57,
1493                MATOP_SET_VALUES_BLOCKED=58,
1494                MATOP_GET_SUBMATRIX=59,
1495                MATOP_DESTROY=60,
1496                MATOP_VIEW=61,
1497                MATOP_CONVERT_FROM=62,
1498                MATOP_USE_SCALED_FORM=63,
1499                MATOP_SCALE_SYSTEM=64,
1500                MATOP_UNSCALE_SYSTEM=65,
1501                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1502                MATOP_SET_VALUES_LOCAL=67,
1503                MATOP_ZERO_ROWS_LOCAL=68,
1504                MATOP_GET_ROW_MAX_ABS=69,
1505                MATOP_GET_ROW_MIN_ABS=70,
1506                MATOP_CONVERT=71,
1507                MATOP_SET_COLORING=72,
1508                MATOP_SET_VALUES_ADIC=73,
1509                MATOP_SET_VALUES_ADIFOR=74,
1510                MATOP_FD_COLORING_APPLY=75,
1511                MATOP_SET_FROM_OPTIONS=76,
1512                MATOP_MULT_CON=77,
1513                MATOP_MULT_TRANSPOSE_CON=78,
1514                MATOP_PERMUTE_SPARSIFY=79,
1515                MATOP_MULT_MULTIPLE=80,
1516                MATOP_SOLVE_MULTIPLE=81,
1517                MATOP_GET_INERTIA=82,
1518                MATOP_LOAD=83,
1519                MATOP_IS_SYMMETRIC=84,
1520                MATOP_IS_HERMITIAN=85,
1521                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1522                MATOP_DUMMY=87,
1523                MATOP_GET_VECS=88,
1524                MATOP_MAT_MULT=89,
1525                MATOP_MAT_MULT_SYMBOLIC=90,
1526                MATOP_MAT_MULT_NUMERIC=91,
1527                MATOP_PTAP=92,
1528                MATOP_PTAP_SYMBOLIC=93,
1529                MATOP_PTAP_NUMERIC=94,
1530                MATOP_MAT_MULTTRANSPOSE=95,
1531                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1532                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1533                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1534                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1535                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1536                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1537                MATOP_CONJUGATE=102,
1538                MATOP_SET_SIZES=103,
1539                MATOP_SET_VALUES_ROW=104,
1540                MATOP_REAL_PART=105,
1541                MATOP_IMAG_PART=106,
1542                MATOP_GET_ROW_UTRIANGULAR=107,
1543                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1544                MATOP_MATSOLVE=109,
1545                MATOP_GET_REDUNDANTMATRIX=110,
1546                MATOP_GET_ROW_MIN=111,
1547                MATOP_GET_COLUMN_VEC=112,
1548                MATOP_MISSING_DIAGONAL=113,
1549                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1550                MATOP_CREATE=115,
1551                MATOP_GET_GHOSTS=116,
1552                MATOP_GET_LOCALSUBMATRIX=117,
1553                MATOP_RESTORE_LOCALSUBMATRIX=118,
1554                MATOP_MULT_DIAGONAL_BLOCK=119,
1555                MATOP_HERMITIANTRANSPOSE=120,
1556                MATOP_MULTHERMITIANTRANSPOSE=121,
1557                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1558                MATOP_GETMULTIPROCBLOCK=123,
1559 	       MATOP_GET_SUBMATRICES_PARALLEL=128
1560              } MatOperation;
1561 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1562 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1563 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1564 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1565 
1566 /*
1567    Codes for matrices stored on disk. By default they are
1568    stored in a universal format. By changing the format with
1569    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1570    be stored in a way natural for the matrix, for example dense matrices
1571    would be stored as dense. Matrices stored this way may only be
1572    read into matrices of the same type.
1573 */
1574 #define MATRIX_BINARY_FORMAT_DENSE -1
1575 
1576 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1577 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1578 
1579 /*S
1580      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1582 
1583    Level: advanced
1584 
1585   Concepts: matrix; linear operator, null space
1586 
1587   Users manual sections:
1588 .   sec_singular
1589 
1590 .seealso:  MatNullSpaceCreate()
1591 S*/
1592 typedef struct _p_MatNullSpace* MatNullSpace;
1593 
1594 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1595 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1596 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace);
1597 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1598 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1599 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1600 
1601 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1602 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1603 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1604 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1605 
1606 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1607 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1608 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1609 
1610 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1611 
1612 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1613 
1614 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1615 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1616 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1617 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1618 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1619 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1620 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1621 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1622 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1623 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1624 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1625 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1626 extern PetscErrorCode  MatMFFDSetFromOptions(Mat);
1627 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1628 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1629 
1630 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1632               Jacobian vector products
1633 
1634     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1635 
1636            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1637 
1638     Level: developer
1639 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1641 S*/
1642 typedef struct _p_MatMFFD* MatMFFD;
1643 
1644 /*E
1645     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1646 
1647    Level: beginner
1648 
1649 .seealso: MatMFFDSetType(), MatMFFDRegister()
1650 E*/
1651 #define MatMFFDType char*
1652 #define MATMFFD_DS  "ds"
1653 #define MATMFFD_WP  "wp"
1654 
1655 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1656 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1657 
1658 /*MC
1659    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1660 
1661    Synopsis:
1662    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1663 
1664    Not Collective
1665 
1666    Input Parameters:
1667 +  name_solver - name of a new user-defined compute-h module
1668 .  path - path (either absolute or relative) the library containing this solver
1669 .  name_create - name of routine to create method context
1670 -  routine_create - routine to create method context
1671 
1672    Level: developer
1673 
1674    Notes:
1675    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1676 
1677    If dynamic libraries are used, then the fourth input argument (routine_create)
1678    is ignored.
1679 
1680    Sample usage:
1681 .vb
1682    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1683                "MyHCreate",MyHCreate);
1684 .ve
1685 
1686    Then, your solver can be chosen with the procedural interface via
1687 $     MatMFFDSetType(mfctx,"my_h")
1688    or at runtime via the option
1689 $     -snes_mf_type my_h
1690 
1691 .keywords: MatMFFD, register
1692 
1693 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1694 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is loaded by name from the library path,
   so the function pointer argument (d) is dropped (see the manual page above). */
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif
1700 
1701 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1702 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1703 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1704 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1705 
1706 
1707 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1708 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1709 
1710 /*
1711    PETSc interface to MUMPS
1712 */
1713 #ifdef PETSC_HAVE_MUMPS
1714 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1715 #endif
1716 
1717 /*
1718    PETSc interface to SUPERLU
1719 */
1720 #ifdef PETSC_HAVE_SUPERLU
1721 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1722 #endif
1723 
1724 extern PetscErrorCode  MatCreateNest(MPI_Comm comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1725 extern PetscErrorCode  MatNestGetSize(Mat A,PetscInt *M,PetscInt *N);
1726 extern PetscErrorCode  MatNestGetSubMats(Mat A,PetscInt *M,PetscInt *N,Mat ***mat);
1727 extern PetscErrorCode  MatNestGetSubMat(Mat A,PetscInt idxm,PetscInt jdxm,Mat *sub);
1728 extern PetscErrorCode  MatNestSetVecType(Mat,const VecType);
1729 
1730 PETSC_EXTERN_CXX_END
1731 #endif
1732