xref: /petsc/include/petscmat.h (revision a8a26c1ef46eece1f692deb11f56bcfac16cda1d)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat; /* opaque handle — struct _p_Mat is presumably defined in a private implementation header (not visible in this file) */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ           "seqmaij"
33 #define MATMPIMAIJ           "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ            "seqaij"
37 #define MATMPIAIJ            "mpiaij"
38 #define MATAIJCRL              "aijcrl"
39 #define MATSEQAIJCRL             "seqaijcrl"
40 #define MATMPIAIJCRL             "mpiaijcrl"
41 #define MATAIJCUDA             "aijcuda"
42 #define MATSEQAIJCUDA            "seqaijcuda"
43 #define MATMPIAIJCUDA            "mpiaijcuda"
44 #define MATAIJPERM             "aijperm"
45 #define MATSEQAIJPERM            "seqaijperm"
46 #define MATMPIAIJPERM            "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE          "seqdense"
50 #define MATMPIDENSE          "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ           "seqbaij"
53 #define MATMPIBAIJ           "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ          "seqsbaij"
57 #define MATMPISBAIJ          "mpisbaij"
58 #define MATDAAD            "daad"
59 #define MATMFFD            "mffd"
60 #define MATNORMAL          "normal"
61 #define MATLRC             "lrc"
62 #define MATSCATTER         "scatter"
63 #define MATBLOCKMAT        "blockmat"
64 #define MATCOMPOSITE       "composite"
65 #define MATSEQFFTW         "seqfftw"
66 #define MATSEQCUFFT        "seqcufft"
67 #define MATTRANSPOSEMAT    "transpose"
68 #define MATSCHURCOMPLEMENT "schurcomplement"
69 #define MATPYTHON          "python"
70 #define MATHYPRESTRUCT     "hyprestruct"
71 #define MATHYPRESSTRUCT    "hypresstruct"
72 #define MATSUBMATRIX       "submatrix"
73 #define MATLOCALREF        "localref"
74 #define MATNEST            "nest"
75 
76 /*E
77     MatSolverPackage - String with the name of a PETSc matrix solver type.
78 
79     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
80        SuperLU or SuperLU_Dist etc.
81 
82 
83    Level: beginner
84 
85 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
86 E*/
87 #define MatSolverPackage char*
88 #define MATSOLVERSPOOLES      "spooles"
89 #define MATSOLVERSUPERLU      "superlu"
90 #define MATSOLVERSUPERLU_DIST "superlu_dist"
91 #define MATSOLVERUMFPACK      "umfpack"
92 #define MATSOLVERCHOLMOD      "cholmod"
93 #define MATSOLVERESSL         "essl"
94 #define MATSOLVERLUSOL        "lusol"
95 #define MATSOLVERMUMPS        "mumps"
96 #define MATSOLVERPASTIX       "pastix"
97 #define MATSOLVERDSCPACK      "dscpack"
98 #define MATSOLVERMATLAB       "matlab"
99 #define MATSOLVERPETSC        "petsc"
100 #define MATSOLVERPLAPACK      "plapack"
101 #define MATSOLVERBAS          "bas"
102 
103 /*E
104     MatFactorType - indicates what type of factorization is requested
105 
106     Level: beginner
107 
108    Any additions/changes here MUST also be made in include/finclude/petscmat.h
109 
110 .seealso: MatSolverPackage, MatGetFactor()
111 E*/
112 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; /* any change here must also be made in include/finclude/petscmat.h (see note above) */
113 extern const char *const MatFactorTypes[]; /* printable names — presumably indexed by MatFactorType values; confirm in the defining source file */
114 
115 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
116 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
117 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
118 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
119 
120 /* Logging support */
121 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
122 extern PetscClassId  MAT_CLASSID;
123 extern PetscClassId  MAT_FDCOLORING_CLASSID;
124 extern PetscClassId  MAT_PARTITIONING_CLASSID;
125 extern PetscClassId  MAT_NULLSPACE_CLASSID;
126 extern PetscClassId  MATMFFD_CLASSID;
127 
128 /*E
129     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
130      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
131      that the input matrix is to be replaced with the converted matrix.
132 
133     Level: beginner
134 
135    Any additions/changes here MUST also be made in include/finclude/petscmat.h
136 
137 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
138 E*/
139 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; /* any change here must also be made in include/finclude/petscmat.h (see note above) */
140 
141 /*E
142     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
143      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
144 
145     Level: beginner
146 
147 .seealso: MatGetSeqNonzerostructure()
148 E*/
149 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; /* whether extracted submatrices carry the numerical values or only the nonzero structure */
150 
151 extern PetscErrorCode  MatInitializePackage(const char[]);
152 
153 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
154 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
155 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
156 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
157 extern PetscErrorCode  MatSetType(Mat,const MatType);
158 extern PetscErrorCode  MatSetFromOptions(Mat);
159 extern PetscErrorCode  MatSetUpPreallocation(Mat);
160 extern PetscErrorCode  MatRegisterAll(const char[]);
161 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
162 
163 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
164 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
165 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
166 
167 /*MC
168    MatRegisterDynamic - Adds a new matrix type
169 
170    Synopsis:
171    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
172 
173    Not Collective
174 
175    Input Parameters:
176 +  name - name of a new user-defined matrix type
177 .  path - path (either absolute or relative) to the library containing this solver
178 .  name_create - name of routine to create method context
179 -  routine_create - routine to create method context
180 
181    Notes:
182    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
183 
184    If dynamic libraries are used, then the fourth input argument (routine_create)
185    is ignored.
186 
187    Sample usage:
188 .vb
189    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
190                "MyMatCreate",MyMatCreate);
191 .ve
192 
193    Then, your solver can be chosen with the procedural interface via
194 $     MatSetType(Mat,"my_mat")
195    or at runtime via the option
196 $     -mat_type my_mat
197 
198    Level: advanced
199 
200    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
201          If your function is not being put into a shared library then use MatRegister() instead
202 
203 .keywords: Mat, register
204 
205 .seealso: MatRegisterAll(), MatRegisterDestroy()
206 
207 M*/
/* With dynamic libraries the creation routine is loaded at runtime by name
   (argument c) from the library path (argument b), so the function-pointer
   argument d is ignored and 0 is passed instead — see the note in the manual
   page above.  Otherwise d is forwarded to MatRegister() directly. */
208 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
209 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
210 #else
211 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
212 #endif
213 
214 extern PetscBool  MatRegisterAllCalled;
215 extern PetscFList MatList;
216 extern PetscFList MatColoringList;
217 extern PetscFList MatPartitioningList;
218 
219 /*E
220     MatStructure - Indicates if the matrix has the same nonzero structure
221 
222     Level: beginner
223 
224    Any additions/changes here MUST also be made in include/finclude/petscmat.h
225 
226 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
227 E*/
228 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure; /* any change here must also be made in include/finclude/petscmat.h (see note above) */
229 
230 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
231 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
232 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
233 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
234 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
235 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
236 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
237 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
238 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
239 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
240 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
241 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
242 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
243 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
245 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
246 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
247 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
248 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
252 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
253 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
255 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
256 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
257 
258 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
259 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
260 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
264 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
265 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
266 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
267 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
268 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
269 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
271 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
272 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
273 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
274 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
278 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
279 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
281 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
282 
283 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
284 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
285 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
286 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
290 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
291 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
292 
293 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
294 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
295 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
296 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
298 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
299 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
300 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
301 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
302 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
305 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
306 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
308 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
309 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
310 
311 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
312 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
313 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
314 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
315 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
316 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
317 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
318 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
319 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
320 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
321 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
322 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
323 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
324 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
325 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
326 extern PetscErrorCode  MatCompositeMerge(Mat);
327 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
328 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; /* presumably: component matrices applied as a sum vs. as a product — confirm against the MATCOMPOSITE implementation */
329 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
330 
331 extern PetscErrorCode  MatCreateSeqFFTW(MPI_Comm,PetscInt,const PetscInt[],Mat*);
332 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
334 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
335 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
336 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
337 
338 extern PetscErrorCode  MatCreatePython(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const char[],Mat*);
339 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
340 
341 
342 extern PetscErrorCode  MatSetUp(Mat);
343 extern PetscErrorCode  MatDestroy(Mat);
344 
345 extern PetscErrorCode  MatConjugate(Mat);
346 extern PetscErrorCode  MatRealPart(Mat);
347 extern PetscErrorCode  MatImaginaryPart(Mat);
348 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
349 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
350 
351 /* ------------------------------------------------------------*/
352 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
353 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
354 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
355 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
356 
357 /*S
358      MatStencil - Data structure (C struct) for storing information about a single row or
358         column of a matrix as an index on an associated grid.
360 
361    Level: beginner
362 
363   Concepts: matrix; linear operator
364 
365 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
366 S*/
367 typedef struct {
368   PetscInt k,j,i,c; /* grid indices in each dimension (k,j,i); c is presumably the per-node component/DOF index — confirm against MatSetValuesStencil() usage */
369 } MatStencil;
370 
371 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
374 
375 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
376 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
377 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
378 
379 /*E
380     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
381      to continue to add values to it
382 
383     Level: beginner
384 
385 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
386 E*/
387 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* NOTE(review): values are pinned explicitly — likely relied upon by the Fortran/binary interface; confirm before changing */
388 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
389 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
390 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
391 
392 
393 
394 /*E
395     MatOption - Options that may be set for a matrix and its behavior or storage
396 
397     Level: beginner
398 
399    Any additions/changes here MUST also be made in include/finclude/petscmat.h
400 
401 .seealso: MatSetOption()
402 E*/
403 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
404               MAT_SYMMETRIC,
405               MAT_STRUCTURALLY_SYMMETRIC,
406               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
407               MAT_NEW_NONZERO_LOCATION_ERR,
408               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
409               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
410               MAT_USE_INODES,
411               MAT_HERMITIAN,
412               MAT_SYMMETRY_ETERNAL,
413               MAT_CHECK_COMPRESSED_ROW,
414               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
415               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
416               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
417               NUM_MAT_OPTIONS} MatOption; /* NUM_MAT_OPTIONS is a count sentinel, not a settable option; any change here must also be made in include/finclude/petscmat.h (see note above) */
418 extern const char *MatOptions[]; /* printable names — presumably indexed by MatOption values; confirm in the defining source file */
419 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
420 extern PetscErrorCode  MatGetType(Mat,const MatType*);
421 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
422 
423 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
424 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
425 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
426 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
427 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
428 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
429 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
430 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
431 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
432 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
433 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
434 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
435 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
436 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
437 
438 
439 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
440 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
441 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
442 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
443 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
444 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
445 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
446 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
447 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
448 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
449 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
450 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
451 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
452 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
453 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
454 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
455 
456 /*E
457     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
458   its numerical values copied over or just its nonzero structure.
459 
460     Level: beginner
461 
462    Any additions/changes here MUST also be made in include/finclude/petscmat.h
463 
464 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
465 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
466 $                               have several matrices with the same nonzero pattern.
467 
468 .seealso: MatDuplicate()
469 E*/
470 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; /* any change here must also be made in include/finclude/petscmat.h (see note above) */
471 
472 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
473 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
474 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
475 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
476 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
477 
478 
479 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
480 extern PetscErrorCode  MatView(Mat,PetscViewer);
481 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
482 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
483 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
484 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
485 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
486 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
487 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
488 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
489 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
490 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
491 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
492 
493 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
494 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
495 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
496 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
497 
498 /*S
499      MatInfo - Context of matrix information, used with MatGetInfo()
500 
501    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
502 
503    Level: intermediate
504 
505   Concepts: matrix^nonzero information
506 
507 .seealso:  MatGetInfo(), MatInfoType
508 S*/
typedef struct {
  /* All fields are PetscLogDouble so this struct maps onto a plain double
     precision array of dimension MAT_INFO_SIZE in Fortran (see MatGetInfo()). */
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* nonzeros: allocated, actually used, allocated but unneeded */
  PetscLogDouble memory;                             /* memory allocated (presumably bytes -- TODO confirm units) */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: value given by user vs. actually needed */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
518 
519 /*E
520     MatInfoType - Indicates if you want information about the local part of the matrix,
521      the entire parallel matrix or the maximum over all the local parts.
522 
523     Level: beginner
524 
525    Any additions/changes here MUST also be made in include/finclude/petscmat.h
526 
527 .seealso: MatGetInfo(), MatInfo
528 E*/
529 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
530 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
531 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
532 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
533 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
534 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
535 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
536 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
537 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
538 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
539 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
540 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
541 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
542 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
543 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
544 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
545 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
546 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
547 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
548 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
549 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
550 
551 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
552 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
553 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
554 extern PetscErrorCode  MatZeroEntries(Mat);
555 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
556 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
557 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
558 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
559 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
560 
561 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
562 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
563 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
564 
565 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
566 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
567 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
568 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
569 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
570 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
571 
572 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
573 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
574 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
575 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
576 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
577 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
578 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
579 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
580 
581 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
582 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
583 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
584 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
585 extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
586 extern PetscErrorCode  MatGetLocalMat(Mat,MatReuse,Mat*);
587 extern PetscErrorCode  MatGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
588 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
589 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
590 #if defined (PETSC_USE_CTABLE)
591 #include "petscctable.h"
592 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
593 #else
594 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
595 #endif
596 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
597 
598 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
599 
600 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
601 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
602 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
603 
604 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
605 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
606 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
607 
608 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
609 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
610 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
611 
612 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
613 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
614 
615 extern PetscErrorCode  MatScale(Mat,PetscScalar);
616 extern PetscErrorCode  MatShift(Mat,PetscScalar);
617 
618 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
619 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
620 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
621 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
622 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
623 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
624 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
625 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
626 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
627 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
628 
629 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
630 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
631 
632 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
633 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
634 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
635 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
636 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
637 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
638 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
639 
640 /*MC
641    MatSetValue - Set a single entry into a matrix.
642 
643    Not collective
644 
645    Input Parameters:
646 +  m - the matrix
647 .  row - the row location of the entry
648 .  col - the column location of the entry
649 .  value - the value to insert
650 -  mode - either INSERT_VALUES or ADD_VALUES
651 
652    Notes:
653    For efficiency one should use MatSetValues() and set several or many
654    values simultaneously if possible.
655 
656    Level: beginner
657 
658 .seealso: MatSetValues(), MatSetValueLocal()
659 M*/
660 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
661 
662 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
663 
664 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
665 
666 /*MC
667    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
668        row in a matrix providing the data that one can use to correctly preallocate the matrix.
669 
670    Synopsis:
671    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
672 
673    Collective on MPI_Comm
674 
675    Input Parameters:
676 +  comm - the communicator that will share the eventually allocated matrix
677 .  nrows - the number of LOCAL rows in the matrix
678 -  ncols - the number of LOCAL columns in the matrix
679 
680    Output Parameters:
681 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
683 
684 
685    Level: intermediate
686 
687    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
689 
690    Do not malloc or free dnz and onz, that is handled internally by these routines
691 
692    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
693 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
695 
696   Concepts: preallocation^Matrix
697 
698 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
699           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
700 M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  /* Opens a scope (closed by MatPreallocateFinalize()) holding the counters used by the MatPreallocateSet*() macros */ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  /* one diagonal (dnz) and one off-diagonal (onz) nonzero counter per local row, zero initialized */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* prefix sums over comm give this process's global column range [__start,__end) and first global row __rstart */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
709 
710 /*MC
711    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
712        row in a matrix providing the data that one can use to correctly preallocate the matrix.
713 
714    Synopsis:
715    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
716 
717    Collective on MPI_Comm
718 
719    Input Parameters:
720 +  comm - the communicator that will share the eventually allocated matrix
721 .  nrows - the number of LOCAL rows in the matrix
722 -  ncols - the number of LOCAL columns in the matrix
723 
724    Output Parameters:
725 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
727 
728 
729    Level: intermediate
730 
731    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
733 
734    Do not malloc or free dnz and onz, that is handled internally by these routines
735 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
737 
738   Concepts: preallocation^Matrix
739 
740 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
741           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
742 M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  /* Symmetric variant of MatPreallocateInitialize(); no __start is kept because MatPreallocateSymmetricSet() only compares columns against __end */ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  /* one diagonal (dnz) and one off-diagonal (onz) nonzero counter per local row, zero initialized */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* prefix sums over comm give the end of this process's global column range (__end) and first global row (__rstart) */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
751 
752 /*MC
753    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
754        inserted using a local number of the rows and columns
755 
756    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
758 
759    Not Collective
760 
761    Input Parameters:
762 +  map - the row mapping from local numbering to global numbering
763 .  nrows - the number of rows indicated
764 .  rows - the indices of the rows
765 .  cmap - the column mapping from local to global numbering
766 .  ncols - the number of columns in the matrix
767 .  cols - the columns indicated
768 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
770 
771 
772    Level: intermediate
773 
774    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
776 
777    Do not malloc or free dnz and onz, that is handled internally by these routines
778 
779   Concepts: preallocation^Matrix
780 
781 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
782           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
783 M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  /* translate the local row and column indices to global numbering, in place */ \
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* count each row's nonzeros; uses _4_ierr and the range variables declared by MatPreallocateInitialize() */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
793 
794 /*MC
795    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
796        inserted using a local number of the rows and columns
797 
798    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
800 
801    Not Collective
802 
803    Input Parameters:
804 +  map - the mapping between local numbering and global numbering
805 .  nrows - the number of rows indicated
806 .  rows - the indices of the rows
807 .  ncols - the number of columns in the matrix
808 .  cols - the columns indicated
809 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
811 
812 
813    Level: intermediate
814 
815    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
817 
818    Do not malloc or free dnz and onz that is handled internally by these routines
819 
820   Concepts: preallocation^Matrix
821 
822 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
823           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
824 M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  /* translate local row and column indices to global numbering (same map for both), in place */ \
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* count only upper-triangular entries of each row via MatPreallocateSymmetricSet() */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
834 
835 /*MC
836    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
837        inserted using a local number of the rows and columns
838 
839    Synopsis:
840    PetscErrorCode MatPreallocateSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
841 
842    Not Collective
843 
844    Input Parameters:
845 +  row - the row
846 .  ncols - the number of columns in the matrix
847 -  cols - the columns indicated
848 
849    Output Parameters:
850 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
852 
853 
854    Level: intermediate
855 
856    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
858 
859    Do not malloc or free dnz and onz that is handled internally by these routines
860 
861    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
862 
863   Concepts: preallocation^Matrix
864 
865 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
866           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
867 M*/
/* Count one row's nonzeros into the dnz/onz arrays set up by MatPreallocateInitialize().
   Columns inside this process's ownership range [__start,__end) are diagonal-block
   entries (dnz); all others are off-diagonal (onz).  The row must lie in the local row
   range [__rstart,__rstart+__nrows).  All macro arguments are now parenthesized so
   expression arguments (e.g. i+1 as row) expand correctly. */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if ((row) < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if ((row) >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else (dnz)[(row) - __rstart]++;\
  }\
}
877 
878 /*MC
879    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
880        inserted using a local number of the rows and columns
881 
882    Synopsis:
883    PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
884 
885    Not Collective
886 
887    Input Parameters:
888 +  nrows - the number of rows indicated
889 .  rows - the indices of the rows
890 .  ncols - the number of columns in the matrix
891 .  cols - the columns indicated
892 .  dnz - the array that will be passed to the matrix preallocation routines
893 -  ozn - the other array passed to the matrix preallocation routines
894 
895 
896    Level: intermediate
897 
898    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
900 
901    Do not malloc or free dnz and onz that is handled internally by these routines
902 
903    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
904 
905   Concepts: preallocation^Matrix
906 
907 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
908           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
909 M*/
/* Symmetric-matrix variant of MatPreallocateSet(): only entries in the upper triangle
   (columns >= row) are counted into dnz; columns at or past __end (the end of this
   process's ownership range, set up by MatPreallocateSymmetricInitialize()) go to onz.
   Arguments are now parenthesized for macro hygiene -- note the original used bare
   cols[__i] here while MatPreallocateSet() used (cols)[__i]. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else if ((cols)[__i] >= (row)) (dnz)[(row) - __rstart]++;\
  }\
}
917 
918 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
920 
921    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
923 
924    Not Collective
925 
926    Input Parameters:
927 .  A - matrix
928 .  row - row where values exist (must be local to this process)
929 .  ncols - number of columns
930 .  cols - columns with nonzeros
931 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
933 
934 
935    Level: intermediate
936 
937    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
939 
940    Do not malloc or free dnz and onz that is handled internally by these routines
941 
942    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
943 
944   Concepts: preallocation^Matrix
945 
946 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
947           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
948 M*/
/* NOTE(review): unlike the other preallocation macros this uses ierr (not _4_ierr),
   so the calling scope must declare its own PetscErrorCode ierr -- confirm intended. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
950 
951 
952 /*MC
953    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
954        row in a matrix providing the data that one can use to correctly preallocate the matrix.
955 
956    Synopsis:
957    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
958 
959    Collective on MPI_Comm
960 
961    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
964 
965 
966    Level: intermediate
967 
968    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
970 
971    Do not malloc or free dnz and onz that is handled internally by these routines
972 
973    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
974 
975   Concepts: preallocation^Matrix
976 
977 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
978           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
979 M*/
/* Frees the dnz/onz arrays and closes the scope opened by MatPreallocateInitialize(). */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
981 
982 
983 
984 /* Routines unique to particular data structures */
985 extern PetscErrorCode  MatShellGetContext(Mat,void **);
986 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
987 
988 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
989 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
990 
991 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
992 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
993 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
994 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
995 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
996 
997 #define MAT_SKIP_ALLOCATION -4
998 
999 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1000 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1001 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1002 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1003 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1004 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1005 
1006 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1007 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1008 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1009 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1010 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1011 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1012 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1013 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1014 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1015 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1016 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1017 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1018 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1019 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1020 
1021 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1022 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1023 
1024 extern PetscErrorCode  MatStoreValues(Mat);
1025 extern PetscErrorCode  MatRetrieveValues(Mat);
1026 
1027 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1028 
1029 /*
1030   These routines are not usually accessed directly, rather solving is
1031   done through the KSP and PC interfaces.
1032 */
1033 
1034 /*E
1035     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1036        with an optional dynamic library name, for example
1037        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1038 
1039    Level: beginner
1040 
1041    Cannot use const because the PC objects manipulate the string
1042 
1043 .seealso: MatGetOrdering()
1044 E*/
1045 #define MatOrderingType char*
1046 #define MATORDERINGNATURAL     "natural"
1047 #define MATORDERINGND          "nd"
1048 #define MATORDERING1WD         "1wd"
1049 #define MATORDERINGRCM         "rcm"
1050 #define MATORDERINGQMD         "qmd"
1051 #define MATORDERINGROWLENGTH   "rowlength"
1052 #define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
1053 #define MATORDERINGDSC_MMD     "dsc_mmd"
1054 #define MATORDERINGDSC_MDF     "dsc_mdf"
1055 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1056 
1057 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1058 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1059 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1060 
1061 /*MC
1062    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1063 
1064    Synopsis:
1065    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1066 
1067    Not Collective
1068 
1069    Input Parameters:
1070 +  sname - name of ordering (for example MATORDERINGND)
1071 .  path - location of library where creation routine is
1072 .  name - name of function that creates the ordering type,a string
1073 -  function - function pointer that creates the ordering
1074 
1075    Level: developer
1076 
1077    If dynamic libraries are used, then the fourth input argument (function)
1078    is ignored.
1079 
1080    Sample usage:
1081 .vb
1082    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1083                "MyOrder",MyOrder);
1084 .ve
1085 
1086    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1088    or at runtime via the option
1089 $     -pc_factor_mat_ordering_type my_order
1090 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1092 
1093 .keywords: matrix, ordering, register
1094 
1095 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1096 M*/
1097 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1098 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
1099 #else
1100 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1101 #endif
1102 
1103 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1104 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1105 extern PetscBool  MatOrderingRegisterAllCalled;
1106 extern PetscFList MatOrderingList;
1107 
1108 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1109 
1110 /*S
1111     MatFactorShiftType - Numeric Shift.
1112 
1113    Level: beginner
1114 
1115 S*/
1116 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1117 extern const char *MatFactorShiftTypes[];
1118 
1119 /*S
1120    MatFactorInfo - Data passed into the matrix factorization routines
1121 
1122    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1123 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1124 
1125    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1126 
1127       You can use MatFactorInfoInitialize() to set default values.
1128 
1129    Level: developer
1130 
1131 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1132           MatFactorInfoInitialize()
1133 
1134 S*/
/* Every field is a PetscReal (even conceptually integer/enum ones) so that the
   struct maps exactly onto a Fortran array of MAT_FACTORINFO_SIZE reals, as
   described in the man page above. */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero indicates a drop-tolerance (dt) based factorization
                                   is requested -- NOTE(review): inferred from the name and the
                                   dt/dtcol/dtcount fields below; confirm against the ILUdt code */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots;
                                   holds a MatFactorShiftType value stored as a PetscReal (see
                                   note above about Fortran layout) */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1149 
1150 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1151 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1152 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1153 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1154 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1155 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1156 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1157 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1158 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1159 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1160 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1161 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1162 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1163 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1164 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1165 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1166 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1167 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1168 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1169 
1170 extern PetscErrorCode  MatSetUnfactored(Mat);
1171 
1172 /*E
1173     MatSORType - What type of (S)SOR to perform
1174 
1175     Level: beginner
1176 
1177    May be bitwise ORd together
1178 
1179    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1180 
1181    MatSORType may be bitwise ORd together, so do not change the numbers
1182 
1183 .seealso: MatSOR()
1184 E*/
/* Bit flags (may be ORd together, per the man page above): FORWARD=1 and
   BACKWARD=2 combine to SYMMETRIC=3; the LOCAL variants use bits 4 and 8
   (LOCAL_SYMMETRIC=12); the remaining options occupy bits 16-128.
   Do not renumber. */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1189 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1190 
1191 /*
1192     These routines are for efficiently computing Jacobians via finite differences.
1193 */
1194 
1195 /*E
1196     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1197        with an optional dynamic library name, for example
1198        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1199 
1200    Level: beginner
1201 
1202 .seealso: MatGetColoring()
1203 E*/
#define MatColoringType char*
/* Built-in coloring algorithm names accepted by MatGetColoring().
   NOTE(review): "sl", "lf" and "id" presumably abbreviate smallest-last,
   largest-first and incidence-degree orderings -- confirm against the
   coloring implementations before relying on this. */
#define MATCOLORINGNATURAL "natural"
#define MATCOLORINGSL      "sl"
#define MATCOLORINGLF      "lf"
#define MATCOLORINGID      "id"
1209 
1210 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1211 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1212 
1213 /*MC
1214    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1215                                matrix package.
1216 
1217    Synopsis:
1218    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1219 
1220    Not Collective
1221 
1222    Input Parameters:
1223 +  sname - name of Coloring (for example MATCOLORINGSL)
1224 .  path - location of library where creation routine is
1225 .  name - name of function that creates the Coloring type, a string
1226 -  function - function pointer that creates the coloring
1227 
1228    Level: developer
1229 
1230    If dynamic libraries are used, then the fourth input argument (function)
1231    is ignored.
1232 
1233    Sample usage:
1234 .vb
1235    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1236                "MyColor",MyColor);
1237 .ve
1238 
   Then, your coloring routine can be chosen with the procedural interface via
1240 $     MatColoringSetType(part,"my_color")
1241    or at runtime via the option
1242 $     -mat_coloring_type my_color
1243 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1245 
1246 .keywords: matrix, Coloring, register
1247 
1248 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1249 M*/
/* With dynamic libraries the creation routine is looked up by name (argument c)
   in the library at path b at run time, so the function pointer d is dropped
   (passed as 0); in a static build it is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1255 
1256 extern PetscBool  MatColoringRegisterAllCalled;
1257 
1258 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1259 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1260 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1261 
1262 /*S
1263      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1264         and coloring
1265 
1266    Level: beginner
1267 
1268   Concepts: coloring, sparse Jacobian, finite differences
1269 
1270 .seealso:  MatFDColoringCreate()
1271 S*/
1272 typedef struct _p_MatFDColoring* MatFDColoring;
1273 
1274 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1275 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring);
1276 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1277 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1278 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1279 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1280 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1281 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1282 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1283 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1284 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1285 /*
1286     These routines are for partitioning matrices: currently used only
1287   for adjacency matrix, MatCreateMPIAdj().
1288 */
1289 
1290 /*S
1291      MatPartitioning - Object for managing the partitioning of a matrix or graph
1292 
1293    Level: beginner
1294 
1295   Concepts: partitioning
1296 
1297 .seealso:  MatPartitioningCreate(), MatPartitioningType
1298 S*/
1299 typedef struct _p_MatPartitioning* MatPartitioning;
1300 
1301 /*E
1302     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1303        with an optional dynamic library name, for example
1304        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1305 
1306    Level: beginner
1307 
1308 .seealso: MatPartitioningCreate(), MatPartitioning
1309 E*/
#define MatPartitioningType char*
/* Built-in partitioner names (see MatPartitioningSetType()).  "current" keeps
   the existing distribution; the remaining entries name external partitioning
   packages -- NOTE(review): those presumably require PETSc to be configured
   with the corresponding package; confirm against the build configuration. */
#define MATPARTITIONINGCURRENT  "current"
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGJOSTLE   "jostle"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGSCOTCH   "scotch"
1318 
1319 
1320 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1321 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1322 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1323 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1324 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1325 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1326 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1327 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning);
1328 
1329 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1330 
1331 /*MC
1332    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1333    matrix package.
1334 
1335    Synopsis:
1336    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1337 
1338    Not Collective
1339 
1340    Input Parameters:
1341 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1342 .  path - location of library where creation routine is
1343 .  name - name of function that creates the partitioning type, a string
1344 -  function - function pointer that creates the partitioning type
1345 
1346    Level: developer
1347 
1348    If dynamic libraries are used, then the fourth input argument (function)
1349    is ignored.
1350 
1351    Sample usage:
1352 .vb
1353    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1354                "MyPartCreate",MyPartCreate);
1355 .ve
1356 
1357    Then, your partitioner can be chosen with the procedural interface via
1358 $     MatPartitioningSetType(part,"my_part")
1359    or at runtime via the option
1360 $     -mat_partitioning_type my_part
1361 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1363 
1364 .keywords: matrix, partitioning, register
1365 
1366 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1367 M*/
/* With dynamic libraries the creation routine is looked up by name (argument c)
   in the library at path b at run time, so the function pointer d is dropped
   (passed as 0); in a static build it is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1373 
1374 extern PetscBool  MatPartitioningRegisterAllCalled;
1375 
1376 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1377 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1378 
1379 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1380 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1381 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1382 
1383 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1384 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1385 
1386 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1387 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1388 
/* Global partitioning methods offered by the Chaco package */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
/* Local refinement methods for Chaco (Kernighan-Lin or none) */
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
/* Eigensolvers Chaco can use (presumably for the spectral method -- confirm) */
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1398 
/* Global method names accepted by MatPartitioningPartySetGlobal() */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
/* Local refinement method names accepted by MatPartitioningPartySetLocal() */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1415 
/* Global strategies for the Scotch partitioner */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
/* Local refinement strategies for Scotch (Kernighan-Lin or none) */
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1426 
1427 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1428 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1429 
/*
    MatOperation - indices into a Mat's function table; used with
    MatHasOperation() to query, and MatShellSetOperation()/MatShellGetOperation()
    to override or retrieve, an individual matrix operation.

    If you add entries here you must also add them to finclude/petscmat.h
*/
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               /* NOTE(review): values 124-127 are skipped -- presumably reserved
                  for table slots not exposed here; confirm against the _MatOps
                  struct before assigning new operations in this range */
               MATOP_GET_SUBMATRICES_PARALLEL=128
             } MatOperation;
1559 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1560 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1561 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1562 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1563 
1564 /*
1565    Codes for matrices stored on disk. By default they are
1566    stored in a universal format. By changing the format with
1567    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1568    be stored in a way natural for the matrix, for example dense matrices
1569    would be stored as dense. Matrices stored this way may only be
1570    read into matrices of the same type.
1571 */
1572 #define MATRIX_BINARY_FORMAT_DENSE -1
1573 
1574 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1575 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1576 
1577 /*S
1578      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1580 
1581    Level: advanced
1582 
1583   Concepts: matrix; linear operator, null space
1584 
1585   Users manual sections:
1586 .   sec_singular
1587 
1588 .seealso:  MatNullSpaceCreate()
1589 S*/
1590 typedef struct _p_MatNullSpace* MatNullSpace;
1591 
1592 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1593 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1594 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace);
1595 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1596 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1597 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1598 
1599 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1600 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1601 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1602 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1603 
1604 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1605 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1606 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1607 
1608 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1609 
1610 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1611 
1612 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1613 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1614 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1615 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1616 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1617 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1618 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1619 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1620 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1621 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1622 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1623 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1624 extern PetscErrorCode  MatMFFDSetFromOptions(Mat);
1625 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1626 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1627 
1628 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1630               Jacobian vector products
1631 
1632     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1633 
1634            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1635 
1636     Level: developer
1637 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1639 S*/
1640 typedef struct _p_MatMFFD* MatMFFD;
1641 
1642 /*E
1643     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1644 
1645    Level: beginner
1646 
1647 .seealso: MatMFFDSetType(), MatMFFDRegister()
1648 E*/
#define MatMFFDType char*
/* Built-in h-computation algorithm names accepted by MatMFFDSetType().
   NOTE(review): "ds" and "wp" presumably denote the Dennis-Schnabel and
   Walker-Pernice formulas -- confirm against the MFFD implementations. */
#define MATMFFD_DS  "ds"
#define MATMFFD_WP  "wp"
1652 
1653 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1654 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1655 
1656 /*MC
1657    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1658 
1659    Synopsis:
1660    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1661 
1662    Not Collective
1663 
1664    Input Parameters:
1665 +  name_solver - name of a new user-defined compute-h module
1666 .  path - path (either absolute or relative) the library containing this solver
1667 .  name_create - name of routine to create method context
1668 -  routine_create - routine to create method context
1669 
1670    Level: developer
1671 
1672    Notes:
1673    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1674 
1675    If dynamic libraries are used, then the fourth input argument (routine_create)
1676    is ignored.
1677 
1678    Sample usage:
1679 .vb
1680    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1681                "MyHCreate",MyHCreate);
1682 .ve
1683 
1684    Then, your solver can be chosen with the procedural interface via
1685 $     MatMFFDSetType(mfctx,"my_h")
1686    or at runtime via the option
1687 $     -snes_mf_type my_h
1688 
1689 .keywords: MatMFFD, register
1690 
1691 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1692 M*/
/* With dynamic libraries the creation routine is looked up by name (argument c)
   in the library at path b at run time, so the function pointer d is dropped
   (passed as 0); in a static build it is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif
1698 
1699 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1700 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1701 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1702 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1703 
1704 
1705 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1706 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1707 
1708 /*
1709    PETSc interface to MUMPS
1710 */
1711 #ifdef PETSC_HAVE_MUMPS
1712 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1713 #endif
1714 
1715 /*
1716    PETSc interface to SUPERLU
1717 */
1718 #ifdef PETSC_HAVE_SUPERLU
1719 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1720 #endif
1721 
1722 extern PetscErrorCode  MatCreateNest(MPI_Comm comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1723 extern PetscErrorCode  MatNestGetSize(Mat A,PetscInt *M,PetscInt *N);
1724 extern PetscErrorCode  MatNestGetSubMats(Mat A,PetscInt *M,PetscInt *N,Mat ***mat);
1725 extern PetscErrorCode  MatNestGetSubMat(Mat A,PetscInt idxm,PetscInt jdxm,Mat *sub);
1726 extern PetscErrorCode  MatNestSetVecType(Mat,const VecType);
1727 
1728 PETSC_EXTERN_CXX_END
1729 #endif
1730