xref: /petsc/include/petscmat.h (revision d372ba47371068bdce79d7d3cf159aa0df7e4cba)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ           "seqmaij"
33 #define MATMPIMAIJ           "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ            "seqaij"
37 #define MATMPIAIJ            "mpiaij"
38 #define MATAIJCRL              "aijcrl"
39 #define MATSEQAIJCRL             "seqaijcrl"
40 #define MATMPIAIJCRL             "mpiaijcrl"
41 #define MATAIJCUSP             "aijcusp"
42 #define MATSEQAIJCUSP            "seqaijcusp"
43 #define MATMPIAIJCUSP            "mpiaijcusp"
44 #define MATAIJPERM             "aijperm"
45 #define MATSEQAIJPERM            "seqaijperm"
46 #define MATMPIAIJPERM            "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE          "seqdense"
50 #define MATMPIDENSE          "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ           "seqbaij"
53 #define MATMPIBAIJ           "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ          "seqsbaij"
57 #define MATMPISBAIJ          "mpisbaij"
58 #define MATDAAD            "daad"
59 #define MATMFFD            "mffd"
60 #define MATNORMAL          "normal"
61 #define MATLRC             "lrc"
62 #define MATSCATTER         "scatter"
63 #define MATBLOCKMAT        "blockmat"
64 #define MATCOMPOSITE       "composite"
65 #define MATFFT             "fft"
66 #define MATFFTW              "fftw"
67 #define MATSEQCUFFT          "seqcufft"
68 #define MATTRANSPOSEMAT    "transpose"
69 #define MATSCHURCOMPLEMENT "schurcomplement"
70 #define MATPYTHON          "python"
71 #define MATHYPRESTRUCT     "hyprestruct"
72 #define MATHYPRESSTRUCT    "hypresstruct"
73 #define MATSUBMATRIX       "submatrix"
74 #define MATLOCALREF        "localref"
75 #define MATNEST            "nest"
76 
77 /*E
78     MatSolverPackage - String with the name of a PETSc matrix solver type.
79 
80     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
81        SuperLU or SuperLU_Dist etc.
82 
83 
84    Level: beginner
85 
86 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
87 E*/
88 #define MatSolverPackage char*
89 #define MATSOLVERSPOOLES      "spooles"
90 #define MATSOLVERSUPERLU      "superlu"
91 #define MATSOLVERSUPERLU_DIST "superlu_dist"
92 #define MATSOLVERUMFPACK      "umfpack"
93 #define MATSOLVERCHOLMOD      "cholmod"
94 #define MATSOLVERESSL         "essl"
95 #define MATSOLVERLUSOL        "lusol"
96 #define MATSOLVERMUMPS        "mumps"
97 #define MATSOLVERPASTIX       "pastix"
98 #define MATSOLVERDSCPACK      "dscpack"
99 #define MATSOLVERMATLAB       "matlab"
100 #define MATSOLVERPETSC        "petsc"
101 #define MATSOLVERPLAPACK      "plapack"
102 #define MATSOLVERBAS          "bas"
103 
104 /*E
105     MatFactorType - indicates what type of factorization is requested
106 
107     Level: beginner
108 
109    Any additions/changes here MUST also be made in include/finclude/petscmat.h
110 
111 .seealso: MatSolverPackage, MatGetFactor()
112 E*/
113 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
114 extern const char *const MatFactorTypes[];  /* printable string names for the MatFactorType values above -- TODO confirm array order matches the enum */
115 
116 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
117 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
118 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
119 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
120 
121 /* Logging support */
122 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
123 extern PetscClassId  MAT_CLASSID;
124 extern PetscClassId  MAT_FDCOLORING_CLASSID;
125 extern PetscClassId  MAT_PARTITIONING_CLASSID;
126 extern PetscClassId  MAT_NULLSPACE_CLASSID;
127 extern PetscClassId  MATMFFD_CLASSID;
128 
129 /*E
130     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
131      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
132      that the input matrix is to be replaced with the converted matrix.
133 
134     Level: beginner
135 
136    Any additions/changes here MUST also be made in include/finclude/petscmat.h
137 
138 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
139 E*/
140 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
141 
142 /*E
143     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
144      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
145 
146     Level: beginner
147 
148 .seealso: MatGetSeqNonzeroStructure()
149 E*/
150 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
151 
152 extern PetscErrorCode  MatInitializePackage(const char[]);
153 
154 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
155 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
156 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
157 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
158 extern PetscErrorCode  MatSetType(Mat,const MatType);
159 extern PetscErrorCode  MatSetFromOptions(Mat);
160 extern PetscErrorCode  MatSetUpPreallocation(Mat);
161 extern PetscErrorCode  MatRegisterAll(const char[]);
162 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
163 
164 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
165 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
166 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
167 
168 /*MC
169    MatRegisterDynamic - Adds a new matrix type
170 
171    Synopsis:
172    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
173 
174    Not Collective
175 
176    Input Parameters:
177 +  name - name of a new user-defined matrix type
178 .  path - path (either absolute or relative) to the library containing this solver
179 .  name_create - name of routine to create method context
180 -  routine_create - routine to create method context
181 
182    Notes:
183    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
184 
185    If dynamic libraries are used, then the fourth input argument (routine_create)
186    is ignored.
187 
188    Sample usage:
189 .vb
190    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
191                "MyMatCreate",MyMatCreate);
192 .ve
193 
194    Then, your solver can be chosen with the procedural interface via
195 $     MatSetType(Mat,"my_mat")
196    or at runtime via the option
197 $     -mat_type my_mat
198 
199    Level: advanced
200 
201    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
202          If your function is not being put into a shared library then use MatRegister() instead
203 
204 .keywords: Mat, register
205 
206 .seealso: MatRegisterAll(), MatRegisterDestroy()
207 
208 M*/
209 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
210 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)  /* with dynamic libraries the function pointer (d) is ignored; the routine named c is loaded from the library at path b (see MC doc above) */
211 #else
212 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)  /* without dynamic libraries the creation routine d is registered directly */
213 #endif
214 
215 extern PetscBool  MatRegisterAllCalled;
216 extern PetscFList MatList;
217 extern PetscFList MatColoringList;
218 extern PetscFList MatPartitioningList;
219 
220 /*E
221     MatStructure - Indicates if the matrix has the same nonzero structure
222 
223     Level: beginner
224 
225    Any additions/changes here MUST also be made in include/finclude/petscmat.h
226 
227 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
228 E*/
229 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;
230 
231 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
232 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
233 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
234 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
235 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
236 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
237 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
238 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
239 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
240 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
241 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
242 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
243 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
245 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
246 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
247 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
248 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
249 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
252 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
253 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
255 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
256 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
257 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
258 
259 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
260 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
265 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
266 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
267 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
268 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
269 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
271 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
272 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
273 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
274 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
275 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
278 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
279 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
281 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
282 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
283 
284 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
285 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
286 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
291 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
292 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
293 
294 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
295 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
296 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
299 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
300 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
301 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
302 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
305 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
306 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
308 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
309 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
310 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
311 
312 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
313 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
315 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
316 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
317 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
318 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
319 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
320 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
321 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
322 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
323 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
324 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
325 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
326 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
327 extern PetscErrorCode  MatCompositeMerge(Mat);
328 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
329 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
330 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
331 
332 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
333 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
336 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
337 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
338 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
339 
340 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
341 
342 extern PetscErrorCode  MatSetUp(Mat);
343 extern PetscErrorCode  MatDestroy(Mat*);
344 
345 extern PetscErrorCode  MatConjugate(Mat);
346 extern PetscErrorCode  MatRealPart(Mat);
347 extern PetscErrorCode  MatImaginaryPart(Mat);
348 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
349 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
350 
351 /* ------------------------------------------------------------*/
352 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
353 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
354 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
355 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
356 
357 /*S
358      MatStencil - Data structure (C struct) for storing information about a single row or
359         column of a matrix as an index on an associated grid.
360 
361    Level: beginner
362 
363   Concepts: matrix; linear operator
364 
365 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
366 S*/
367 typedef struct {
368   PetscInt k,j,i,c;  /* logical grid indices (k,j,i) identifying the row/column, and c the component/degree-of-freedom number -- NOTE(review): exact c semantics per MatSetValuesStencil(), confirm */
369 } MatStencil;
370 
371 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
374 
375 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
376 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
377 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
378 
379 /*E
380     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
381      to continue to add values to it
382 
383     Level: beginner
384 
385 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
386 E*/
387 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
388 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
389 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
390 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
391 
392 
393 
394 /*E
395     MatOption - Options that may be set for a matrix and its behavior or storage
396 
397     Level: beginner
398 
399    Any additions/changes here MUST also be made in include/finclude/petscmat.h
400 
401 .seealso: MatSetOption()
402 E*/
403 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
404               MAT_SYMMETRIC,
405               MAT_STRUCTURALLY_SYMMETRIC,
406               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
407               MAT_NEW_NONZERO_LOCATION_ERR,
408               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
409               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
410               MAT_USE_INODES,
411               MAT_HERMITIAN,
412               MAT_SYMMETRY_ETERNAL,
413               MAT_CHECK_COMPRESSED_ROW,
414               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
415               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
416               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
417               NUM_MAT_OPTIONS} MatOption;  /* options passed to MatSetOption(); NUM_MAT_OPTIONS is a count-of-options sentinel, not itself a settable option */
418 extern const char *MatOptions[];
419 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
420 extern PetscErrorCode  MatGetType(Mat,const MatType*);
421 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
422 
423 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
424 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
425 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
426 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
427 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
428 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
429 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
430 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
431 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
432 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
433 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
434 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
435 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
436 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
437 
438 
439 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
440 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
441 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
442 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
443 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
444 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
445 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
446 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
447 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
448 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
449 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
450 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
451 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
452 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
453 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
454 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
455 
456 /*E
457     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
458   its numerical values copied over or just its nonzero structure.
459 
460     Level: beginner
461 
462    Any additions/changes here MUST also be made in include/finclude/petscmat.h
463 
464 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
465 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
466 $                               have several matrices with the same nonzero pattern.
467 
468 .seealso: MatDuplicate()
469 E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;

/* Conversion between matrix formats and duplication */
extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


/* Copying, viewing, loading, and matrix property queries */
extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode  MatView(Mat,PetscViewer);
extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
extern PetscErrorCode  MatLoad(Mat, PetscViewer);

/* Low-level access to the compressed row/column (IJ) index structure */
extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
497 
/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
   (all fields are PetscLogDouble so the struct maps onto that array)

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, allocated but unneeded */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: user-supplied estimate vs. actually required */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
518 
/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
/* Row-wise extrema/sums and the diagonal */
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
/* Structural transformations and scaling */
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
/* Equality tests; the PetscInt argument is the number of random vectors used in the comparison */
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

/* Norms, and zeroing of entries/rows (optionally placing a value on the diagonal) */
extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode  MatZeroEntries(Mat);
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
560 
/* Scaled-form linear system support */
extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

/* Global/local dimensions and parallel row/column ownership ranges */
extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
571 
/* Extraction of submatrices (use MatDestroyMatrices() to free arrays created by MatGetSubMatrices()) */
extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Merging of sequential matrices into parallel ones, and parallel-to-local helpers */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
/* Signature varies: the column map is a PetscTable when PETSc is built with ctable support */
#if defined (PETSC_USE_CTABLE)
#include "petscctable.h"
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
598 
/* Matrix-matrix products, each with separate symbolic and numeric phases */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);

/* Matrix sums and in-place scaling/shifting */
extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local-to-global mappings and assembly/zeroing with local index numbering */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

/* Stash for off-process values set during assembly */
extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

/* Grid transfer (interpolation/restriction) and miscellaneous utilities */
extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
639 
/*MC
   MatSetValue - Set a single entry into a matrix.

   Not collective

   Input Parameters:
+  m - the matrix
.  row - the row location of the entry
.  col - the column location of the entry
.  value - the value to insert
-  mode - either INSERT_VALUES or ADD_VALUES

   Notes:
   For efficiency one should use MatSetValues() and set several or many
   values simultaneously if possible.

   This is implemented as a static inline wrapper around MatSetValues()
   and returns that routine's error code.

   Level: beginner

.seealso: MatSetValues(), MatSetValueLocal()
M*/
PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
661 
/* Get a single entry from a matrix; for several values use MatGetValues() directly for efficiency. */
PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

/* Set a single entry using the local (per-process) numbering; wrapper around MatSetValuesLocal(). */
PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
665 
/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines; receives, per local row,
         the count of nonzeros in the diagonal block
-  onz - the other array passed to the matrix preallocation routines; receives, per local row,
         the count of nonzeros in the off-diagonal block (see MatPreallocateSet())


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
709 
/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
751 
/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   The rows and cols arrays are overwritten in place with their global numbering.

   Must be used between MatPreallocateInitialize() and MatPreallocateFinalize(); it uses
   the variable _4_ierr declared by MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
793 
/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows,PetscInt *rows,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   The rows and cols arrays are overwritten in place with their global numbering.

   Must be used between MatPreallocateInitialize() and MatPreallocateFinalize(); it uses
   the variable _4_ierr declared by MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
834 
/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the (global) row; must lie in this process's row range
.  ncols - the number of columns indicated
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
877 
/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns; only the upper triangular
       part (columns >= row) is counted in dnz

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the (global) row
.  ncols - the number of columns indicated
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
917 
/*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
.  A - matrix; may be null, in which case the locations are only counted into dnz/onz
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
   It also requires a PetscErrorCode variable named ierr in the calling scope.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
950 
951 
952 /*MC
953    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
954        row in a matrix providing the data that one can use to correctly preallocate the matrix.
955 
956    Synopsis:
957    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
958 
959    Collective on MPI_Comm
960 
961    Input Parameters:
962 +  dnz - the array that was be passed to the matrix preallocation routines
963 -  ozn - the other array passed to the matrix preallocation routines
964 
965 
966    Level: intermediate
967 
968    Notes:
969     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details.
970 
971    Do not malloc or free dnz and onz that is handled internally by these routines
972 
973    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
974 
975   Concepts: preallocation^Matrix
976 
977 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
978           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
979 M*/
980 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
981 
982 
983 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void **);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Creation of sequential matrices directly from user-provided CSR-style arrays */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);

#define MAT_SKIP_ALLOCATION -4

/* Format-specific preallocation routines */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

/* Save/restore of matrix values (e.g. around factorizations that overwrite them) */
extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1030 /*
1031   These routines are not usually accessed directly, rather solving is
1032   done through the KSP and PC interfaces.
1033 */
1034 
/*E
    MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

   Level: beginner

   Cannot use const because the PC objects manipulate the string

.seealso: MatGetOrdering()
E*/
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Computation and registration of matrix orderings */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1061 
/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type,a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order","/home/username/my_lib/lib/libO/solaris/mylib.a",
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1103 
/* Global registry of available ordering routines */
extern PetscErrorCode  MatOrderingRegisterDestroy(void);
extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
extern PetscBool  MatOrderingRegisterAllCalled;
extern PetscFList MatOrderingList;

extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1110 
/*S
    MatFactorShiftType - Type of numeric shift applied during factorization
    to avoid zero pivots (see the shifttype field of MatFactorInfo).

   Level: beginner

.seealso: MatFactorInfo
S*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];
1119 
1120 /*S
1121    MatFactorInfo - Data passed into the matrix factorization routines
1122 
1123    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1124 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1125 
1126    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1127 
1128       You can use MatFactorInfoInitialize() to set default values.
1129 
1130    Level: developer
1131 
1132 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1133           MatFactorInfoInitialize()
1134 
1135 S*/
1136 typedef struct {                /* every field is a PetscReal so the struct maps onto a Fortran double precision array of size MAT_FACTORINFO_SIZE (see note above) */
1137   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1138   PetscReal     usedt;          /* if nonzero, use the drop tolerance dt -- presumably a boolean flag stored as real; TODO confirm against MatFactorInfoInitialize() */
1139   PetscReal     dt;             /* drop tolerance */
1140   PetscReal     dtcol;          /* tolerance for pivoting */
1141   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1142   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1143   PetscReal     levels;         /* ICC/ILU(levels) */
1144   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1145                                    factorization may be faster if do not pivot */
1146   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1147   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value stored as real) */
1148   PetscReal     shiftamount;     /* how large the shift is */
1149 } MatFactorInfo;
1150 
1151 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1152 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1153 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1154 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1155 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1156 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1157 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1158 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1159 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1160 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1161 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1162 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1163 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1164 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1165 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1166 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1167 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1168 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1169 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1170 
1171 extern PetscErrorCode  MatSetUnfactored(Mat);
1172 
1173 /*E
1174     MatSORType - What type of (S)SOR to perform
1175 
1176     Level: beginner
1177 
1178    May be bitwise ORd together
1179 
1180    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1181 
1182    MatSORType may be bitwise ORd together, so do not change the numbers
1183 
1184 .seealso: MatSOR()
1185 E*/
1186 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, /* SYMMETRIC == FORWARD | BACKWARD */
1187               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1188               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,        /* LOCAL_SYMMETRIC == LOCAL_FORWARD | LOCAL_BACKWARD */
1189               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* bit flags: values may be ORed together, so the numbers must not change (see note above) */
1190 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1191 
1192 /*
1193     These routines are for efficiently computing Jacobians via finite differences.
1194 */
1195 
1196 /*E
1197     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1198        with an optional dynamic library name, for example
1199        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1200 
1201    Level: beginner
1202 
1203 .seealso: MatGetColoring()
1204 E*/
1205 #define MatColoringType char*
1206 #define MATCOLORINGNATURAL "natural"
1207 #define MATCOLORINGSL      "sl"
1208 #define MATCOLORINGLF      "lf"
1209 #define MATCOLORINGID      "id"
1210 
1211 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1212 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1213 
1214 /*MC
1215    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1216                                matrix package.
1217 
1218    Synopsis:
1219    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,MatColoringType,ISColoring *))
1220 
1221    Not Collective
1222 
1223    Input Parameters:
1224 +  sname - name of Coloring (for example MATCOLORINGSL)
1225 .  path - location of library where creation routine is
1226 .  name - name of function that creates the Coloring type, a string
1227 -  function - function pointer that creates the coloring
1228 
1229    Level: developer
1230 
1231    If dynamic libraries are used, then the fourth input argument (function)
1232    is ignored.
1233 
1234    Sample usage:
1235 .vb
1236    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1237                "MyColor",MyColor);
1238 .ve
1239 
1240    Then, your coloring routine can be chosen with the procedural interface via
1241 $     MatColoringSetType(part,"my_color")
1242    or at runtime via the option
1243 $     -mat_coloring_type my_color
1244 
1245    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1246 
1247 .keywords: matrix, Coloring, register
1248 
1249 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1250 M*/
1251 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1252 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) /* dynamic libraries: drop the function pointer (d); the routine is located by name (c) at runtime */
1253 #else
1254 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) /* static build: register the function pointer directly */
1255 #endif
1256 
1257 extern PetscBool  MatColoringRegisterAllCalled;
1258 
1259 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1260 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1261 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1262 
1263 /*S
1264      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1265         and coloring
1266 
1267    Level: beginner
1268 
1269   Concepts: coloring, sparse Jacobian, finite differences
1270 
1271 .seealso:  MatFDColoringCreate()
1272 S*/
1273 typedef struct _p_MatFDColoring* MatFDColoring;
1274 
1275 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1276 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1277 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1278 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1279 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1280 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1281 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1282 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1283 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1284 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1285 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1286 /*
1287     These routines are for partitioning matrices: currently used only
1288   for adjacency matrix, MatCreateMPIAdj().
1289 */
1290 
1291 /*S
1292      MatPartitioning - Object for managing the partitioning of a matrix or graph
1293 
1294    Level: beginner
1295 
1296   Concepts: partitioning
1297 
1298 .seealso:  MatPartitioningCreate(), MatPartitioningType
1299 S*/
1300 typedef struct _p_MatPartitioning* MatPartitioning;
1301 
1302 /*E
1303     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1304        with an optional dynamic library name, for example
1305        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1306 
1307    Level: beginner
1308 
1309 .seealso: MatPartitioningCreate(), MatPartitioning
1310 E*/
1311 #define MatPartitioningType char*
1312 #define MATPARTITIONINGCURRENT  "current"
1313 #define MATPARTITIONINGSQUARE   "square"
1314 #define MATPARTITIONINGPARMETIS "parmetis"
1315 #define MATPARTITIONINGCHACO    "chaco"
1316 #define MATPARTITIONINGJOSTLE   "jostle"
1317 #define MATPARTITIONINGPARTY    "party"
1318 #define MATPARTITIONINGSCOTCH   "scotch"
1319 
1320 
1321 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1322 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1323 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1324 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1325 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1326 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1327 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1328 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1329 
1330 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1331 
1332 /*MC
1333    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1334    matrix package.
1335 
1336    Synopsis:
1337    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1338 
1339    Not Collective
1340 
1341    Input Parameters:
1342 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1343 .  path - location of library where creation routine is
1344 .  name - name of function that creates the partitioning type, a string
1345 -  function - function pointer that creates the partitioning type
1346 
1347    Level: developer
1348 
1349    If dynamic libraries are used, then the fourth input argument (function)
1350    is ignored.
1351 
1352    Sample usage:
1353 .vb
1354    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1355                "MyPartCreate",MyPartCreate);
1356 .ve
1357 
1358    Then, your partitioner can be chosen with the procedural interface via
1359 $     MatPartitioningSetType(part,"my_part")
1360    or at runtime via the option
1361 $     -mat_partitioning_type my_part
1362 
1363    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1364 
1365 .keywords: matrix, partitioning, register
1366 
1367 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1368 M*/
1369 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1370 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) /* dynamic libraries: drop the function pointer (d); the routine is located by name (c) at runtime */
1371 #else
1372 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) /* static build: register the function pointer directly */
1373 #endif
1374 
1375 extern PetscBool  MatPartitioningRegisterAllCalled;
1376 
1377 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1378 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1379 
1380 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1381 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1382 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1383 
1384 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1385 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1386 
1387 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1388 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1389 
1390 typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType; /* global partitioning methods of the Chaco package -- names follow Chaco's terminology */
1391 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
1392 typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType; /* local refinement methods for Chaco */
1393 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
1394 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1395 typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType; /* eigensolvers Chaco can use for spectral partitioning */
1396 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1397 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
1398 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1399 
1400 #define MP_PARTY_OPT "opt"
1401 #define MP_PARTY_LIN "lin"
1402 #define MP_PARTY_SCA "sca"
1403 #define MP_PARTY_RAN "ran"
1404 #define MP_PARTY_GBF "gbf"
1405 #define MP_PARTY_GCF "gcf"
1406 #define MP_PARTY_BUB "bub"
1407 #define MP_PARTY_DEF "def"
1408 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1409 #define MP_PARTY_HELPFUL_SETS "hs"
1410 #define MP_PARTY_KERNIGHAN_LIN "kl"
1411 #define MP_PARTY_NONE "no"
1412 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1413 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1414 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1415 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1416 
1417 typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
1418 extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
1419 extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
1420 extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
1421 extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
1422 extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
1423 typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
1424 extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
1425 extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
1426 extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1427 
1428 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1429 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1430 
1431 /*
1432     If you add entries here you must also add them to finclude/petscmat.h
1433 */
1434 typedef enum { MATOP_SET_VALUES=0, /* indices into a Mat's operation table; values must also be added to finclude/petscmat.h (see note above) */
1435                MATOP_GET_ROW=1,
1436                MATOP_RESTORE_ROW=2,
1437                MATOP_MULT=3,
1438                MATOP_MULT_ADD=4,
1439                MATOP_MULT_TRANSPOSE=5,
1440                MATOP_MULT_TRANSPOSE_ADD=6,
1441                MATOP_SOLVE=7,
1442                MATOP_SOLVE_ADD=8,
1443                MATOP_SOLVE_TRANSPOSE=9,
1444                MATOP_SOLVE_TRANSPOSE_ADD=10,
1445                MATOP_LUFACTOR=11,
1446                MATOP_CHOLESKYFACTOR=12,
1447                MATOP_SOR=13,
1448                MATOP_TRANSPOSE=14,
1449                MATOP_GETINFO=15,
1450                MATOP_EQUAL=16,
1451                MATOP_GET_DIAGONAL=17,
1452                MATOP_DIAGONAL_SCALE=18,
1453                MATOP_NORM=19,
1454                MATOP_ASSEMBLY_BEGIN=20,
1455                MATOP_ASSEMBLY_END=21,
1456                MATOP_SET_OPTION=22,
1457                MATOP_ZERO_ENTRIES=23,
1458                MATOP_ZERO_ROWS=24,
1459                MATOP_LUFACTOR_SYMBOLIC=25,
1460                MATOP_LUFACTOR_NUMERIC=26,
1461                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1462                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1463                MATOP_SETUP_PREALLOCATION=29,
1464                MATOP_ILUFACTOR_SYMBOLIC=30,
1465                MATOP_ICCFACTOR_SYMBOLIC=31,
1466                MATOP_GET_ARRAY=32,
1467                MATOP_RESTORE_ARRAY=33,
1468                MATOP_DUPLICATE=34,
1469                MATOP_FORWARD_SOLVE=35,
1470                MATOP_BACKWARD_SOLVE=36,
1471                MATOP_ILUFACTOR=37,
1472                MATOP_ICCFACTOR=38,
1473                MATOP_AXPY=39,
1474                MATOP_GET_SUBMATRICES=40,
1475                MATOP_INCREASE_OVERLAP=41,
1476                MATOP_GET_VALUES=42,
1477                MATOP_COPY=43,
1478                MATOP_GET_ROW_MAX=44,
1479                MATOP_SCALE=45,
1480                MATOP_SHIFT=46,
1481                MATOP_DIAGONAL_SET=47,
1482                MATOP_ILUDT_FACTOR=48,
1483                MATOP_SET_BLOCK_SIZE=49,
1484                MATOP_GET_ROW_IJ=50,
1485                MATOP_RESTORE_ROW_IJ=51,
1486                MATOP_GET_COLUMN_IJ=52,
1487                MATOP_RESTORE_COLUMN_IJ=53,
1488                MATOP_FDCOLORING_CREATE=54,
1489                MATOP_COLORING_PATCH=55,
1490                MATOP_SET_UNFACTORED=56,
1491                MATOP_PERMUTE=57,
1492                MATOP_SET_VALUES_BLOCKED=58,
1493                MATOP_GET_SUBMATRIX=59,
1494                MATOP_DESTROY=60,
1495                MATOP_VIEW=61,
1496                MATOP_CONVERT_FROM=62,
1497                MATOP_USE_SCALED_FORM=63,
1498                MATOP_SCALE_SYSTEM=64,
1499                MATOP_UNSCALE_SYSTEM=65,
1500                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1501                MATOP_SET_VALUES_LOCAL=67,
1502                MATOP_ZERO_ROWS_LOCAL=68,
1503                MATOP_GET_ROW_MAX_ABS=69,
1504                MATOP_GET_ROW_MIN_ABS=70,
1505                MATOP_CONVERT=71,
1506                MATOP_SET_COLORING=72,
1507                MATOP_SET_VALUES_ADIC=73,
1508                MATOP_SET_VALUES_ADIFOR=74,
1509                MATOP_FD_COLORING_APPLY=75,
1510                MATOP_SET_FROM_OPTIONS=76,
1511                MATOP_MULT_CON=77,
1512                MATOP_MULT_TRANSPOSE_CON=78,
1513                MATOP_PERMUTE_SPARSIFY=79,
1514                MATOP_MULT_MULTIPLE=80,
1515                MATOP_SOLVE_MULTIPLE=81,
1516                MATOP_GET_INERTIA=82,
1517                MATOP_LOAD=83,
1518                MATOP_IS_SYMMETRIC=84,
1519                MATOP_IS_HERMITIAN=85,
1520                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1521                MATOP_DUMMY=87,
1522                MATOP_GET_VECS=88,
1523                MATOP_MAT_MULT=89,
1524                MATOP_MAT_MULT_SYMBOLIC=90,
1525                MATOP_MAT_MULT_NUMERIC=91,
1526                MATOP_PTAP=92,
1527                MATOP_PTAP_SYMBOLIC=93,
1528                MATOP_PTAP_NUMERIC=94,
1529                MATOP_MAT_MULTTRANSPOSE=95,
1530                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1531                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1532                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1533                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1534                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1535                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1536                MATOP_CONJUGATE=102,
1537                MATOP_SET_SIZES=103,
1538                MATOP_SET_VALUES_ROW=104,
1539                MATOP_REAL_PART=105,
1540                MATOP_IMAG_PART=106,
1541                MATOP_GET_ROW_UTRIANGULAR=107,
1542                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1543                MATOP_MATSOLVE=109,
1544                MATOP_GET_REDUNDANTMATRIX=110,
1545                MATOP_GET_ROW_MIN=111,
1546                MATOP_GET_COLUMN_VEC=112,
1547                MATOP_MISSING_DIAGONAL=113,
1548                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1549                MATOP_CREATE=115,
1550                MATOP_GET_GHOSTS=116,
1551                MATOP_GET_LOCALSUBMATRIX=117,
1552                MATOP_RESTORE_LOCALSUBMATRIX=118,
1553                MATOP_MULT_DIAGONAL_BLOCK=119,
1554                MATOP_HERMITIANTRANSPOSE=120,
1555                MATOP_MULTHERMITIANTRANSPOSE=121,
1556                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1557                MATOP_GETMULTIPROCBLOCK=123,
1558 	       MATOP_GET_SUBMATRICES_PARALLEL=128 /* NOTE(review): values 124-127 are skipped -- presumably reserved; confirm before assigning new operations */
1559              } MatOperation;
1560 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1561 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1562 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1563 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1564 
1565 /*
1566    Codes for matrices stored on disk. By default they are
1567    stored in a universal format. By changing the format with
1568    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1569    be stored in a way natural for the matrix, for example dense matrices
1570    would be stored as dense. Matrices stored this way may only be
1571    read into matrices of the same type.
1572 */
1573 #define MATRIX_BINARY_FORMAT_DENSE -1
1574 
1575 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1576 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1577 
1578 /*S
1579      MatNullSpace - Object that removes a null space from a vector, i.e.
1580          orthogonalizes the vector against a subspace
1581 
1582    Level: advanced
1583 
1584   Concepts: matrix; linear operator, null space
1585 
1586   Users manual sections:
1587 .   sec_singular
1588 
1589 .seealso:  MatNullSpaceCreate()
1590 S*/
1591 typedef struct _p_MatNullSpace* MatNullSpace;
1592 
1593 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1594 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1595 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1596 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1597 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1598 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1599 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1600 
1601 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1602 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1603 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1604 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1605 
1606 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1607 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1608 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1609 
1610 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1611 
1612 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1613 
1614 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1615 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1616 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1617 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1618 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1619 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1620 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1621 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1622 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1623 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1624 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1625 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1626 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1627 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1628 
1629 /*S
1630     MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1631               Jacobian vector products
1632 
1633     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1634 
1635            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1636 
1637     Level: developer
1638 
1639 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1640 S*/
1641 typedef struct _p_MatMFFD* MatMFFD;
1642 
1643 /*E
1644     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1645 
1646    Level: beginner
1647 
1648 .seealso: MatMFFDSetType(), MatMFFDRegister()
1649 E*/
1650 #define MatMFFDType char*
1651 #define MATMFFD_DS  "ds"
1652 #define MATMFFD_WP  "wp"
1653 
1654 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1655 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1656 
1657 /*MC
1658    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1659 
1660    Synopsis:
1661    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1662 
1663    Not Collective
1664 
1665    Input Parameters:
1666 +  name_solver - name of a new user-defined compute-h module
1667 .  path - path (either absolute or relative) the library containing this solver
1668 .  name_create - name of routine to create method context
1669 -  routine_create - routine to create method context
1670 
1671    Level: developer
1672 
1673    Notes:
1674    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1675 
1676    If dynamic libraries are used, then the fourth input argument (routine_create)
1677    is ignored.
1678 
1679    Sample usage:
1680 .vb
1681    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1682                "MyHCreate",MyHCreate);
1683 .ve
1684 
1685    Then, your solver can be chosen with the procedural interface via
1686 $     MatMFFDSetType(mfctx,"my_h")
1687    or at runtime via the option
1688 $     -snes_mf_type my_h
1689 
1690 .keywords: MatMFFD, register
1691 
1692 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1693 M*/
1694 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1695 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) /* dynamic libraries: drop the function pointer (d); the routine is located by name (c) at runtime */
1696 #else
1697 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) /* static build: register the function pointer directly */
1698 #endif
1699 
1700 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1701 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1702 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1703 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1704 
1705 
1706 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1707 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1708 
1709 /*
1710    PETSc interface to MUMPS
1711 */
1712 #ifdef PETSC_HAVE_MUMPS
1713 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1714 #endif
1715 
1716 /*
1717    PETSc interface to SUPERLU
1718 */
1719 #ifdef PETSC_HAVE_SUPERLU
1720 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1721 #endif
1722 
1723 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1724 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1725 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1726 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1727 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1728 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1729 
1730 PETSC_EXTERN_CXX_END
1731 #endif
1732