xref: /petsc/include/petscmat.h (revision bc59fbc54ac8b63d9331f83ad670091a818dfe89)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
typedef struct _p_Mat*           Mat;  /* opaque handle: struct _p_Mat is only forward-declared here; its layout is private to the implementation */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
#define MatType char*
/* Registered matrix type names.  The bare names ("aij", "baij", "sbaij", "dense",
   "maij") presumably resolve to the "seq" variant on one process and the "mpi"
   variant on several -- confirm against MatSetType().  "seq*"/"mpi*" force a
   specific variant. */
#define MATSAME            "same"
#define MATMAIJ            "maij"
#define MATSEQMAIJ           "seqmaij"
#define MATMPIMAIJ           "mpimaij"
#define MATIS              "is"
#define MATAIJ             "aij"
#define MATSEQAIJ            "seqaij"
#define MATMPIAIJ            "mpiaij"
#define MATAIJCRL              "aijcrl"
#define MATSEQAIJCRL             "seqaijcrl"
#define MATMPIAIJCRL             "mpiaijcrl"
#define MATAIJCUSP             "aijcusp"
#define MATSEQAIJCUSP            "seqaijcusp"
#define MATMPIAIJCUSP            "mpiaijcusp"
#define MATAIJPERM             "aijperm"
#define MATSEQAIJPERM            "seqaijperm"
#define MATMPIAIJPERM            "mpiaijperm"
#define MATSHELL           "shell"
#define MATDENSE           "dense"
#define MATSEQDENSE          "seqdense"
#define MATMPIDENSE          "mpidense"
#define MATBAIJ            "baij"
#define MATSEQBAIJ           "seqbaij"
#define MATMPIBAIJ           "mpibaij"
#define MATMPIADJ          "mpiadj"
#define MATSBAIJ           "sbaij"
#define MATSEQSBAIJ          "seqsbaij"
#define MATMPISBAIJ          "mpisbaij"
#define MATDAAD            "daad"
#define MATMFFD            "mffd"
#define MATNORMAL          "normal"
#define MATLRC             "lrc"
#define MATSCATTER         "scatter"
#define MATBLOCKMAT        "blockmat"
#define MATCOMPOSITE       "composite"
#define MATFFT             "fft"
#define MATFFTW              "fftw"
#define MATSEQCUFFT          "seqcufft"
#define MATTRANSPOSEMAT    "transpose"
#define MATSCHURCOMPLEMENT "schurcomplement"
#define MATPYTHON          "python"
#define MATHYPRESTRUCT     "hyprestruct"
#define MATHYPRESSTRUCT    "hypresstruct"
#define MATSUBMATRIX       "submatrix"
#define MATLOCALREF        "localref"
#define MATNEST            "nest"
76 
77 /*E
78     MatSolverPackage - String with the name of a PETSc matrix solver type.
79 
80     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
81        SuperLU or SuperLU_Dist etc.
82 
83 
84    Level: beginner
85 
86 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
87 E*/
#define MatSolverPackage char*
/* Names of the factorization packages usable with MatGetFactor(); most select
   an optional external package that PETSc must have been configured with. */
#define MATSOLVERSPOOLES      "spooles"
#define MATSOLVERSUPERLU      "superlu"
#define MATSOLVERSUPERLU_DIST "superlu_dist"
#define MATSOLVERUMFPACK      "umfpack"
#define MATSOLVERCHOLMOD      "cholmod"
#define MATSOLVERESSL         "essl"
#define MATSOLVERLUSOL        "lusol"
#define MATSOLVERMUMPS        "mumps"
#define MATSOLVERPASTIX       "pastix"
#define MATSOLVERDSCPACK      "dscpack"
#define MATSOLVERMATLAB       "matlab"
#define MATSOLVERPETSC        "petsc"
#define MATSOLVERPLAPACK      "plapack"
#define MATSOLVERBAS          "bas"
103 
104 /*E
105     MatFactorType - indicates what type of factorization is requested
106 
107     Level: beginner
108 
109    Any additions/changes here MUST also be made in include/finclude/petscmat.h
110 
111 .seealso: MatSolverPackage, MatGetFactor()
112 E*/
typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
extern const char *const MatFactorTypes[];  /* string names matching the enum, for printing/options */

/* Obtain a factorization "shell" matrix of the given type from the named solver package */
extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
/* Query (without erroring) whether the package supports that factorization for this matrix */
extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
120 
/* Logging support */
#define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
extern PetscClassId  MAT_CLASSID;               /* class id for Mat objects */
extern PetscClassId  MAT_FDCOLORING_CLASSID;    /* finite-difference coloring contexts */
extern PetscClassId  MAT_PARTITIONING_CLASSID;  /* matrix partitioning contexts */
extern PetscClassId  MAT_NULLSPACE_CLASSID;     /* null-space objects */
extern PetscClassId  MATMFFD_CLASSID;           /* matrix-free finite-difference matrices */
128 
129 /*E
130     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
131      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate
132      that the input matrix is to be replaced with the converted matrix.
133 
134     Level: beginner
135 
136    Any additions/changes here MUST also be made in include/finclude/petscmat.h
137 
138 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
139 E*/
typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* see the manual-page comment above for per-routine semantics */
141 
142 /*E
143     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
144      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
145 
146     Level: beginner
147 
148 .seealso: MatGetSeqNonzerostructure()
149 E*/
typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether extracted submatrices carry numerical values or just structure */
151 
extern PetscErrorCode  MatInitializePackage(const char[]);  /* one-time registration of Mat types/events; argument is a dynamic-library path */

/* Basic object creation and configuration.
   NOTE(review): PetscPolymorphicFunction/Subroutine invocations below generate
   return-value convenience variants -- presumably C++-only overloads that expand
   to nothing in plain C; confirm against their definition in the system headers. */
extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
extern PetscErrorCode  MatSetType(Mat,const MatType);
extern PetscErrorCode  MatSetFromOptions(Mat);
extern PetscErrorCode  MatSetUpPreallocation(Mat);
extern PetscErrorCode  MatRegisterAll(const char[]);
extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));

/* options-database prefix management for Mat options */
extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
167 
168 /*MC
169    MatRegisterDynamic - Adds a new matrix type
170 
171    Synopsis:
172    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
173 
174    Not Collective
175 
176    Input Parameters:
177 +  name - name of a new user-defined matrix type
178 .  path - path (either absolute or relative) the library containing this solver
179 .  name_create - name of routine to create method context
180 -  routine_create - routine to create method context
181 
182    Notes:
183    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
184 
185    If dynamic libraries are used, then the fourth input argument (routine_create)
186    is ignored.
187 
188    Sample usage:
189 .vb
   MatRegisterDynamic("my_mat","/home/username/my_lib/lib/libO/solaris/mylib.a",
191                "MyMatCreate",MyMatCreate);
192 .ve
193 
194    Then, your solver can be chosen with the procedural interface via
195 $     MatSetType(Mat,"my_mat")
196    or at runtime via the option
197 $     -mat_type my_mat
198 
199    Level: advanced
200 
   Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
         If your function is not being put into a shared library then use MatRegister() instead
203 
204 .keywords: Mat, register
205 
206 .seealso: MatRegisterAll(), MatRegisterDestroy()
207 
208 M*/
/* With dynamic libraries the function pointer (d) is dropped and the routine named
   by c is loaded from the library at path b instead (see the manual page above). */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
#else
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
#endif
214 
extern PetscBool  MatRegisterAllCalled;   /* guards against re-running MatRegisterAll() */
extern PetscFList MatList;                /* registered matrix types */
extern PetscFList MatColoringList;        /* registered coloring algorithms */
extern PetscFList MatPartitioningList;    /* registered partitioning algorithms */
219 
220 /*E
221     MatStructure - Indicates if the matrix has the same nonzero structure
222 
223     Level: beginner
224 
225    Any additions/changes here MUST also be made in include/finclude/petscmat.h
226 
227 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
228 E*/
typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;  /* relationship between two matrices' nonzero patterns; see MatCopy()/KSPSetOperators() */
230 
/* Dense and AIJ (compressed sparse row) creation.  The polymorphic variants
   default the communicator (PETSC_COMM_SELF/PETSC_COMM_WORLD) and/or the
   preallocation arguments (nz = 0, nnz = PETSC_NULL). */
extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
/* construction directly from user-supplied CSR arrays */
extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
258 
/* Block AIJ (BAIJ) creation; bs is the block size.  Polymorphic variants default
   the communicator and/or preallocation as for the AIJ creators above. */
extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
283 
/* Adjacency (for partitioning) and symmetric block AIJ (SBAIJ) creation. */
extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))

extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
311 
/* Creation of the special (non-storage) matrix types declared above. */
extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
extern PetscErrorCode  MatCreateNormal(Mat,Mat*);        /* implicit A^T A */
PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);   /* low-rank correction */
extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
extern PetscErrorCode  MatCompositeMerge(Mat);
extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;  /* how a composite combines its members */
extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);

extern PetscErrorCode  MatCreateFFTW(MPI_Comm,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);     /* implicit transpose; does not copy data */
extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);

extern PetscErrorCode  MatCreatePython(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const char[],Mat*);
extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
341 
342 
extern PetscErrorCode  MatSetUp(Mat);
extern PetscErrorCode  MatDestroy(Mat);

/* entrywise operations on complex-valued matrices */
extern PetscErrorCode  MatConjugate(Mat);
extern PetscErrorCode  MatRealPart(Mat);
extern PetscErrorCode  MatImaginaryPart(Mat);
extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
351 
/* ------------------------------------------------------------*/
/* Insertion/addition of values; InsertMode selects INSERT vs ADD semantics. */
extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
357 
358 /*S
359      MatStencil - Data structure (C struct) for storing information about a single row or
360         column of a matrix as index on an associated grid.
361 
362    Level: beginner
363 
364   Concepts: matrix; linear operator
365 
366 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
367 S*/
typedef struct {
  PetscInt k,j,i,c;  /* k,j,i: logical grid indices; c: component index -- presumably i is the fastest-varying dimension, confirm against MatSetValuesStencil() */
} MatStencil;
371 
/* grid-stencil based insertion (rows/columns addressed by MatStencil) */
extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);

/* ADIC/ADIFOR automatic-differentiation support */
extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
379 
380 /*E
381     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
382      to continue to add values to it
383 
384     Level: beginner
385 
386 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
387 E*/
/* Note the explicit values: MAT_FINAL_ASSEMBLY is 0 (the zero/default value), MAT_FLUSH_ASSEMBLY is 1. */
typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
392 
393 
394 
395 /*E
396     MatOption - Options that may be set for a matrix and its behavior or storage
397 
398     Level: beginner
399 
400    Any additions/changes here MUST also be made in include/finclude/petscmat.h
401 
402 .seealso: MatSetOption()
403 E*/
typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
              MAT_SYMMETRIC,
              MAT_STRUCTURALLY_SYMMETRIC,
              MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
              MAT_NEW_NONZERO_LOCATION_ERR,
              MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
              MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
              MAT_USE_INODES,
              MAT_HERMITIAN,
              MAT_SYMMETRY_ETERNAL,
              MAT_CHECK_COMPRESSED_ROW,
              MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
              MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
              MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
              NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS must stay last: it is the option count */
extern const char *MatOptions[];           /* string names matching the enum, for printing/options */
extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
extern PetscErrorCode  MatGetType(Mat,const MatType*);
PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
423 
/* Read access to entries, rows, columns, the raw array, and block size.
   Each MatGet* with a matching MatRestore* must be paired with it. */
extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
438 
439 
/* Matrix-vector products and related predicates.  The *Add polymorphic forms
   default the accumulation vector to the output vector (y += A x). */
extern PetscErrorCode  MatMult(Mat,Vec,Vec);
extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec)
;
PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
456 
457 /*E
458     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
459   its numerical values copied over or just its nonzero structure.
460 
461     Level: beginner
462 
463    Any additions/changes here MUST also be made in include/finclude/petscmat.h
464 
465 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
466 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
467 $                               have several matrices with the same nonzero pattern.
468 
469 .seealso: MatDuplicate()
470 E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* MAT_SHARE_NONZERO_PATTERN shares i/j arrays and implies MAT_DO_NOT_COPY_VALUES (see manual page above) */

extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
478 
479 
extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode  MatView(Mat,PetscViewer);
/* Symmetry/Hermitian predicates; the *Known variants report whether the answer
   is already cached (first PetscBool) and, if so, what it is (second PetscBool). */
extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
extern PetscErrorCode  MatLoad(Mat, PetscViewer);

/* raw CSR/CSC index access; the final PetscBool reports whether the arrays were provided */
extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
498 
499 /*S
500      MatInfo - Context of matrix information, used with MatGetInfo()
501 
502    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
503 
504    Level: intermediate
505 
506   Concepts: matrix^nonzero information
507 
508 .seealso:  MatGetInfo(), MatInfoType
509 S*/
/* Statistics filled in by MatGetInfo().  Every field is a PetscLogDouble so
   the struct maps onto a plain double-precision array of dimension
   MAT_INFO_SIZE in Fortran (see the manual page above). */
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
519 
520 /*E
521     MatInfoType - Indicates if you want information about the local part of the matrix,
522      the entire parallel matrix or the maximum over all the local parts.
523 
524     Level: beginner
525 
526    Any additions/changes here MUST also be made in include/finclude/petscmat.h
527 
528 .seealso: MatGetInfo(), MatInfo
529 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
/* Diagonal extraction and per-row max/min/sum reductions. */
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
/* (Hermitian) transpose, permutation, and diagonal scaling/assignment. */
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
/* Equality tests between two matrices and between their products. */
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
/* Zero all entries, or zero selected rows (and columns); variants take an
   explicit index list, an IS, or grid stencils, plus a PetscScalar and two
   optional Vecs. */
extern PetscErrorCode  MatZeroEntries(Mat);
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
561 
/* Scaled-form system support. */
extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

/* Global/local dimensions and the parallel row/column ownership layout. */
extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);

/* Submatrix extraction and nonzero-structure extraction. */
extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Developer-level helpers (note the implementation-style _MPIAIJ_ names) for
   merging sequential pieces into MPI matrices and extracting local parts. */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
extern PetscErrorCode  MatGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
/* The third argument's type depends on the build: a PetscTable when
   PETSC_USE_CTABLE is configured, a plain integer array otherwise. */
#if defined (PETSC_USE_CTABLE)
#include "petscctable.h"
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
600 
/* Matrix-matrix products.  Each product has a combined driver taking a
   MatReuse flag and a fill estimate (PetscReal), plus split
   Symbolic/Numeric phases. */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);

/* AXPY/AYPX-style updates; the MatStructure flag describes how the two
   nonzero patterns relate.  Scale/Shift act on a single matrix. */
extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local-numbering support: local-to-global mappings plus the Local variants
   of the zero-rows and set-values operations. */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

/* Off-process value stash configuration/statistics. */
extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

/* Grid-transfer (interpolation/restriction) application and misc queries. */
extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
641 
642 /*MC
643    MatSetValue - Set a single entry into a matrix.
644 
645    Not collective
646 
647    Input Parameters:
648 +  m - the matrix
649 .  row - the row location of the entry
650 .  col - the column location of the entry
651 .  value - the value to insert
652 -  mode - either INSERT_VALUES or ADD_VALUES
653 
654    Notes:
655    For efficiency one should use MatSetValues() and set several or many
656    values simultaneously if possible.
657 
658    Level: beginner
659 
660 .seealso: MatSetValues(), MatSetValueLocal()
661 M*/
662 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
663 
664 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
665 
666 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
667 
668 /*MC
669    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
670        row in a matrix providing the data that one can use to correctly preallocate the matrix.
671 
672    Synopsis:
673    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
674 
675    Collective on MPI_Comm
676 
677    Input Parameters:
678 +  comm - the communicator that will share the eventually allocated matrix
679 .  nrows - the number of LOCAL rows in the matrix
680 -  ncols - the number of LOCAL columns in the matrix
681 
682    Output Parameters:
683 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
685 
686 
687    Level: intermediate
688 
689    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
691 
692    Do not malloc or free dnz and onz, that is handled internally by these routines
693 
694    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
695 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
697 
698   Concepts: preallocation^Matrix
699 
700 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
701           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
702 M*/
/* NOTE: expands to "0;" followed by an unbalanced '{' -- the scope is closed
   by MatPreallocateFinalize().  Declares _4_ierr, __nrows, __ctmp, __rstart,
   __start and __end, which the other MatPreallocate* macros rely on.
   dnz/onz are allocated and zeroed here (freed in the Finalize macro); the
   MPI_Scan prefix sums establish this process's first global column
   (__start) and first global row (__rstart). */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
711 
712 /*MC
713    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
714        row in a matrix providing the data that one can use to correctly preallocate the matrix.
715 
716    Synopsis:
717    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
718 
719    Collective on MPI_Comm
720 
721    Input Parameters:
722 +  comm - the communicator that will share the eventually allocated matrix
723 .  nrows - the number of LOCAL rows in the matrix
724 -  ncols - the number of LOCAL columns in the matrix
725 
726    Output Parameters:
727 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
729 
730 
731    Level: intermediate
732 
733    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
735 
736    Do not malloc or free dnz and onz, that is handled internally by these routines
737 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
739 
740   Concepts: preallocation^Matrix
741 
742 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
743           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
744 M*/
/* Symmetric-matrix variant of MatPreallocateInitialize(): identical setup
   except __start is not declared (MatPreallocateSymmetricSet() compares only
   against __end and the row itself).  Opens a scope that must be closed by
   MatPreallocateFinalize(). */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
753 
754 /*MC
755    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
756        inserted using a local number of the rows and columns
757 
758    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
760 
761    Not Collective
762 
763    Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
765 .  nrows - the number of rows indicated
766 .  rows - the indices of the rows
767 .  cmap - the column mapping from local to global numbering
768 .  ncols - the number of columns in the matrix
769 .  cols - the columns indicated
770 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
772 
773 
774    Level: intermediate
775 
776    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
778 
779    Do not malloc or free dnz and onz, that is handled internally by these routines
780 
781   Concepts: preallocation^Matrix
782 
783 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
784           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
785 M*/
/* Translates the local row and column indices to global numbering IN PLACE
   (rows and cols are overwritten -- input and output arrays are the same),
   then tallies each row through MatPreallocateSet().  Uses _4_ierr declared
   by MatPreallocateInitialize(). */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
795 
796 /*MC
797    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
798        inserted using a local number of the rows and columns
799 
800    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
802 
803    Not Collective
804 
805    Input Parameters:
806 +  map - the mapping between local numbering and global numbering
807 .  nrows - the number of rows indicated
808 .  rows - the indices of the rows
809 .  ncols - the number of columns in the matrix
810 .  cols - the columns indicated
811 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
813 
814 
815    Level: intermediate
816 
817    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
819 
820    Do not malloc or free dnz and onz that is handled internally by these routines
821 
822   Concepts: preallocation^Matrix
823 
824 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
825           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
826 M*/
/* Like MatPreallocateSetLocal() but a single mapping is applied to both rows
   and columns (in place) and counting goes through
   MatPreallocateSymmetricSet().  Uses _4_ierr declared by
   MatPreallocateSymmetricInitialize(). */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
836 
837 /*MC
838    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
839        inserted using a local number of the rows and columns
840 
841    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
843 
844    Not Collective
845 
846    Input Parameters:
847 +  row - the row
848 .  ncols - the number of columns in the matrix
849 -  cols - the columns indicated
850 
851    Output Parameters:
852 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
854 
855 
856    Level: intermediate
857 
858    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
860 
861    Do not malloc or free dnz and onz that is handled internally by these routines
862 
863    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
864 
865   Concepts: preallocation^Matrix
866 
867 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
868           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
869 M*/
/* For one (global) row, classifies each column as diagonal-block (inside
   [__start,__end)) or off-diagonal and increments dnz/onz respectively,
   after validating that row lies in this process's row range
   [__rstart,__rstart+__nrows).  Relies on the variables declared by
   MatPreallocateInitialize(). */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
879 
880 /*MC
881    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
882        inserted using a local number of the rows and columns
883 
884    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
886 
887    Not Collective
888 
889    Input Parameters:
890 +  nrows - the number of rows indicated
891 .  rows - the indices of the rows
892 .  ncols - the number of columns in the matrix
893 .  cols - the columns indicated
894 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
896 
897 
898    Level: intermediate
899 
900    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
902 
903    Do not malloc or free dnz and onz that is handled internally by these routines
904 
905    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
906 
907   Concepts: preallocation^Matrix
908 
909 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
910           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
911 M*/
/* Upper-triangular counting for symmetric preallocation: columns >= __end
   are off-diagonal (onz); columns in [row,__end) are diagonal-block (dnz);
   columns below the diagonal are ignored.  Relies on variables declared by
   MatPreallocateSymmetricInitialize(); no row-range validation is done here. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
919 
920 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
922 
923    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
925 
926    Not Collective
927 
928    Input Parameters:
929 .  A - matrix
930 .  row - row where values exist (must be local to this process)
931 .  ncols - number of columns
932 .  cols - columns with nonzeros
933 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
935 
936 
937    Level: intermediate
938 
939    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
941 
942    Do not malloc or free dnz and onz that is handled internally by these routines
943 
944    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
945 
946   Concepts: preallocation^Matrix
947 
948 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
949           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
950 M*/
/* If A already exists, records the locations directly via MatSetValues()
   with PETSC_NULL values; otherwise falls back to MatPreallocateSet()
   counting.  NOTE: unlike the other MatPreallocate* macros this uses the
   caller's 'ierr' variable, not _4_ierr. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
952 
953 
954 /*MC
955    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
956        row in a matrix providing the data that one can use to correctly preallocate the matrix.
957 
958    Synopsis:
959    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
960 
961    Collective on MPI_Comm
962 
963    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
966 
967 
968    Level: intermediate
969 
970    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
972 
973    Do not malloc or free dnz and onz that is handled internally by these routines
974 
975    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
976 
977   Concepts: preallocation^Matrix
978 
979 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
980           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
981 M*/
/* Frees the dnz/onz pair allocated by the Initialize macros and closes the
   '{' scope they opened. */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
983 
984 
985 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void **);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Create sequential AIJ/BAIJ/SBAIJ matrices from caller-provided CSR-style
   arrays -- NOTE(review): ownership/copy semantics not visible here; confirm
   against the implementation before relying on either. */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);

/* Sentinel value meaning "skip the internal allocation". */
#define MAT_SKIP_ALLOCATION -4

/* Per-format preallocation interfaces (sequential, MPI, and CSR variants);
   the polymorphic forms default the scalar count arguments to 0. */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

/* Save and later restore the matrix's numerical values. */
extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1032 /*
1033   These routines are not usually accessed directly, rather solving is
1034   done through the KSP and PC interfaces.
1035 */
1036 
1037 /*E
1038     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1039        with an optional dynamic library name, for example
1040        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1041 
1042    Level: beginner
1043 
1044    Cannot use const because the PC objects manipulate the string
1045 
1046 .seealso: MatGetOrdering()
1047 E*/
/* Registered ordering names; pass one of these strings to MatGetOrdering(). */
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Compute a row/column ordering (returned as two IS) and manage the
   registry of ordering implementations. */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1063 
1064 /*MC
1065    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1066 
1067    Synopsis:
1068    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1069 
1070    Not Collective
1071 
1072    Input Parameters:
1073 +  sname - name of ordering (for example MATORDERINGND)
1074 .  path - location of library where creation routine is
1075 .  name - name of function that creates the ordering type,a string
1076 -  function - function pointer that creates the ordering
1077 
1078    Level: developer
1079 
1080    If dynamic libraries are used, then the fourth input argument (function)
1081    is ignored.
1082 
1083    Sample usage:
1084 .vb
1085    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1086                "MyOrder",MyOrder);
1087 .ve
1088 
1089    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1091    or at runtime via the option
1092 $     -pc_factor_mat_ordering_type my_order
1093 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1095 
1096 .keywords: matrix, ordering, register
1097 
1098 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1099 M*/
/* With dynamic libraries the function-pointer argument is ignored (0 is
   passed, per the manual page above); otherwise it is forwarded unchanged
   to MatOrderingRegister(). */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif

extern PetscErrorCode  MatOrderingRegisterDestroy(void);
extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
extern PetscBool  MatOrderingRegisterAllCalled;
extern PetscFList MatOrderingList;

extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1112 
1113 /*S
1114     MatFactorShiftType - Numeric Shift.
1115 
1116    Level: beginner
1117 
1118 S*/
/* Kind of numeric shift applied during factorization to prevent zero pivots
   (see the shifttype field of MatFactorInfo below). */
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];  /* printable names matching the enum values */
1121 
1122 /*S
1123    MatFactorInfo - Data passed into the matrix factorization routines
1124 
1125    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1126 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1127 
1128    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1129 
1130       You can use MatFactorInfoInitialize() to set default values.
1131 
1132    Level: developer
1133 
1134 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1135           MatFactorInfoInitialize()
1136 
1137 S*/
1138 typedef struct {
1139   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1140   PetscReal     usedt;          /* nonzero => use drop-tolerance (dt) based factorization -- presumably; stored as PetscReal so the struct maps onto a Fortran double array (see MAT_FACTORINFO_SIZE note above) */
1141   PetscReal     dt;             /* drop tolerance */
1142   PetscReal     dtcol;          /* tolerance for pivoting */
1143   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1144   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1145   PetscReal     levels;         /* ICC/ILU(levels) */
1146   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1147                                    factorization may be faster if do not pivot */
1148   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1149   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots; a MatFactorShiftType value stored as a real */
1150   PetscReal     shiftamount;     /* how large the shift is */
1151 } MatFactorInfo;
1152 
1153 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1154 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1155 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1156 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1157 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1158 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1159 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1160 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1161 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1162 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1163 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1164 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1165 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1166 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1167 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1168 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1169 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1170 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1171 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1172 
1173 extern PetscErrorCode  MatSetUnfactored(Mat);
1174 
1175 /*E
1176     MatSORType - What type of (S)SOR to perform
1177 
1178     Level: beginner
1179 
1180    May be bitwise ORd together
1181 
1182    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1183 
1184    MatSORType may be bitwise ORd together, so do not change the numbers
1185 
1186 .seealso: MatSOR()
1187 E*/
1188 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, /* SYMMETRIC == FORWARD | BACKWARD */
1189               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1190               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16, /* LOCAL_SYMMETRIC == LOCAL_FORWARD | LOCAL_BACKWARD */
1191               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* values are distinct bits so they may be ORd together (do not renumber) */
1192 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1193 
1194 /*
1195     These routines are for efficiently computing Jacobians via finite differences.
1196 */
1197 
1198 /*E
1199     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1200        with an optional dynamic library name, for example
1201        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1202 
1203    Level: beginner
1204 
1205 .seealso: MatGetColoring()
1206 E*/
1207 #define MatColoringType char*
1208 #define MATCOLORINGNATURAL "natural"
1209 #define MATCOLORINGSL      "sl"
1210 #define MATCOLORINGLF      "lf"
1211 #define MATCOLORINGID      "id"
1212 
1213 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1214 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1215 
1216 /*MC
1217    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1218                                matrix package.
1219 
1220    Synopsis:
1221    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1222 
1223    Not Collective
1224 
1225    Input Parameters:
1226 +  sname - name of Coloring (for example MATCOLORINGSL)
1227 .  path - location of library where creation routine is
1228 .  name - name of function that creates the Coloring type, a string
1229 -  function - function pointer that creates the coloring
1230 
1231    Level: developer
1232 
1233    If dynamic libraries are used, then the fourth input argument (function)
1234    is ignored.
1235 
1236    Sample usage:
1237 .vb
1238    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1239                "MyColor",MyColor);
1240 .ve
1241 
1242    Then, your coloring routine can be chosen with the procedural interface via
1243 $     MatColoringSetType(part,"my_color")
1244    or at runtime via the option
1245 $     -mat_coloring_type my_color
1246 
1247    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1248 
1249 .keywords: matrix, Coloring, register
1250 
1251 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1252 M*/
1253 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1254 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) /* drop the function pointer (d); the routine is loaded dynamically by name (c) from library path (b) */
1255 #else
1256 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) /* static build: pass the function pointer straight through */
1257 #endif
1258 
1259 extern PetscBool  MatColoringRegisterAllCalled;
1260 
1261 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1262 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1263 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1264 
1265 /*S
1266      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1267         and coloring
1268 
1269    Level: beginner
1270 
1271   Concepts: coloring, sparse Jacobian, finite differences
1272 
1273 .seealso:  MatFDColoringCreate()
1274 S*/
1275 typedef struct _p_MatFDColoring* MatFDColoring;
1276 
1277 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1278 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring);
1279 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1280 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1281 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1282 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1283 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1284 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1285 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1286 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1287 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1288 /*
1289     These routines are for partitioning matrices: currently used only
1290   for adjacency matrix, MatCreateMPIAdj().
1291 */
1292 
1293 /*S
1294      MatPartitioning - Object for managing the partitioning of a matrix or graph
1295 
1296    Level: beginner
1297 
1298   Concepts: partitioning
1299 
1300 .seealso:  MatPartitioningCreate(), MatPartitioningType
1301 S*/
1302 typedef struct _p_MatPartitioning* MatPartitioning;
1303 
1304 /*E
1305     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1306        with an optional dynamic library name, for example
1307        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1308 
1309    Level: beginner
1310 
1311 .seealso: MatPartitioningCreate(), MatPartitioning
1312 E*/
1313 #define MatPartitioningType char*
1314 #define MATPARTITIONINGCURRENT  "current"
1315 #define MATPARTITIONINGSQUARE   "square"
1316 #define MATPARTITIONINGPARMETIS "parmetis"
1317 #define MATPARTITIONINGCHACO    "chaco"
1318 #define MATPARTITIONINGJOSTLE   "jostle"
1319 #define MATPARTITIONINGPARTY    "party"
1320 #define MATPARTITIONINGSCOTCH   "scotch"
1321 
1322 
1323 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1324 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1325 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1326 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1327 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1328 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1329 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1330 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning);
1331 
1332 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1333 
1334 /*MC
1335    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1336    matrix package.
1337 
1338    Synopsis:
1339    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1340 
1341    Not Collective
1342 
1343    Input Parameters:
1344 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1345 .  path - location of library where creation routine is
1346 .  name - name of function that creates the partitioning type, a string
1347 -  function - function pointer that creates the partitioning type
1348 
1349    Level: developer
1350 
1351    If dynamic libraries are used, then the fourth input argument (function)
1352    is ignored.
1353 
1354    Sample usage:
1355 .vb
1356    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1357                "MyPartCreate",MyPartCreate);
1358 .ve
1359 
1360    Then, your partitioner can be chosen with the procedural interface via
1361 $     MatPartitioningSetType(part,"my_part")
1362    or at runtime via the option
1363 $     -mat_partitioning_type my_part
1364 
1365    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1366 
1367 .keywords: matrix, partitioning, register
1368 
1369 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1370 M*/
1371 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1372 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) /* drop the function pointer (d); the routine is loaded dynamically by name (c) from library path (b) */
1373 #else
1374 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) /* static build: pass the function pointer straight through */
1375 #endif
1376 
1377 extern PetscBool  MatPartitioningRegisterAllCalled;
1378 
1379 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1380 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1381 
1382 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1383 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1384 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1385 
1386 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1387 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1388 
1389 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1390 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1391 
1392 typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType; /* Chaco global partitioning methods */
1393 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
1394 typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType; /* Chaco local refinement methods */
1395 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
1396 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1397 typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType; /* eigensolver choice -- presumably used by the spectral method; confirm against Chaco docs */
1398 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1399 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
1400 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1401 
1402 #define MP_PARTY_OPT "opt" /* strings below name the Party global methods; passed to MatPartitioningPartySetGlobal() */
1403 #define MP_PARTY_LIN "lin"
1404 #define MP_PARTY_SCA "sca"
1405 #define MP_PARTY_RAN "ran"
1406 #define MP_PARTY_GBF "gbf"
1407 #define MP_PARTY_GCF "gcf"
1408 #define MP_PARTY_BUB "bub"
1409 #define MP_PARTY_DEF "def"
1410 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1411 #define MP_PARTY_HELPFUL_SETS "hs" /* strings below name the Party local methods; passed to MatPartitioningPartySetLocal() */
1412 #define MP_PARTY_KERNIGHAN_LIN "kl"
1413 #define MP_PARTY_NONE "no"
1414 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1415 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1416 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1417 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1418 
1419 typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType; /* Scotch global partitioning methods */
1420 extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
1421 extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
1422 extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
1423 extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
1424 extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
1425 typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType; /* Scotch local refinement methods */
1426 extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
1427 extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
1428 extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1429 
1430 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1431 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1432 
1433 /*
1434     If you add entries here you must also add them to finclude/petscmat.h
1435 */
1436 typedef enum { MATOP_SET_VALUES=0, /* indices into the Mat function table; see MatHasOperation(), MatShellSetOperation(), MatShellGetOperation() below */
1437                MATOP_GET_ROW=1,
1438                MATOP_RESTORE_ROW=2,
1439                MATOP_MULT=3,
1440                MATOP_MULT_ADD=4,
1441                MATOP_MULT_TRANSPOSE=5,
1442                MATOP_MULT_TRANSPOSE_ADD=6,
1443                MATOP_SOLVE=7,
1444                MATOP_SOLVE_ADD=8,
1445                MATOP_SOLVE_TRANSPOSE=9,
1446                MATOP_SOLVE_TRANSPOSE_ADD=10,
1447                MATOP_LUFACTOR=11,
1448                MATOP_CHOLESKYFACTOR=12,
1449                MATOP_SOR=13,
1450                MATOP_TRANSPOSE=14,
1451                MATOP_GETINFO=15,
1452                MATOP_EQUAL=16,
1453                MATOP_GET_DIAGONAL=17,
1454                MATOP_DIAGONAL_SCALE=18,
1455                MATOP_NORM=19,
1456                MATOP_ASSEMBLY_BEGIN=20,
1457                MATOP_ASSEMBLY_END=21,
1458                MATOP_SET_OPTION=22,
1459                MATOP_ZERO_ENTRIES=23,
1460                MATOP_ZERO_ROWS=24,
1461                MATOP_LUFACTOR_SYMBOLIC=25,
1462                MATOP_LUFACTOR_NUMERIC=26,
1463                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1464                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1465                MATOP_SETUP_PREALLOCATION=29,
1466                MATOP_ILUFACTOR_SYMBOLIC=30,
1467                MATOP_ICCFACTOR_SYMBOLIC=31,
1468                MATOP_GET_ARRAY=32,
1469                MATOP_RESTORE_ARRAY=33,
1470                MATOP_DUPLICATE=34,
1471                MATOP_FORWARD_SOLVE=35,
1472                MATOP_BACKWARD_SOLVE=36,
1473                MATOP_ILUFACTOR=37,
1474                MATOP_ICCFACTOR=38,
1475                MATOP_AXPY=39,
1476                MATOP_GET_SUBMATRICES=40,
1477                MATOP_INCREASE_OVERLAP=41,
1478                MATOP_GET_VALUES=42,
1479                MATOP_COPY=43,
1480                MATOP_GET_ROW_MAX=44,
1481                MATOP_SCALE=45,
1482                MATOP_SHIFT=46,
1483                MATOP_DIAGONAL_SET=47,
1484                MATOP_ILUDT_FACTOR=48,
1485                MATOP_SET_BLOCK_SIZE=49,
1486                MATOP_GET_ROW_IJ=50,
1487                MATOP_RESTORE_ROW_IJ=51,
1488                MATOP_GET_COLUMN_IJ=52,
1489                MATOP_RESTORE_COLUMN_IJ=53,
1490                MATOP_FDCOLORING_CREATE=54,
1491                MATOP_COLORING_PATCH=55,
1492                MATOP_SET_UNFACTORED=56,
1493                MATOP_PERMUTE=57,
1494                MATOP_SET_VALUES_BLOCKED=58,
1495                MATOP_GET_SUBMATRIX=59,
1496                MATOP_DESTROY=60,
1497                MATOP_VIEW=61,
1498                MATOP_CONVERT_FROM=62,
1499                MATOP_USE_SCALED_FORM=63,
1500                MATOP_SCALE_SYSTEM=64,
1501                MATOP_UNSCALE_SYSTEM=65,
1502                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1503                MATOP_SET_VALUES_LOCAL=67,
1504                MATOP_ZERO_ROWS_LOCAL=68,
1505                MATOP_GET_ROW_MAX_ABS=69,
1506                MATOP_GET_ROW_MIN_ABS=70,
1507                MATOP_CONVERT=71,
1508                MATOP_SET_COLORING=72,
1509                MATOP_SET_VALUES_ADIC=73,
1510                MATOP_SET_VALUES_ADIFOR=74,
1511                MATOP_FD_COLORING_APPLY=75,
1512                MATOP_SET_FROM_OPTIONS=76,
1513                MATOP_MULT_CON=77,
1514                MATOP_MULT_TRANSPOSE_CON=78,
1515                MATOP_PERMUTE_SPARSIFY=79,
1516                MATOP_MULT_MULTIPLE=80,
1517                MATOP_SOLVE_MULTIPLE=81,
1518                MATOP_GET_INERTIA=82,
1519                MATOP_LOAD=83,
1520                MATOP_IS_SYMMETRIC=84,
1521                MATOP_IS_HERMITIAN=85,
1522                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1523                MATOP_DUMMY=87,
1524                MATOP_GET_VECS=88,
1525                MATOP_MAT_MULT=89,
1526                MATOP_MAT_MULT_SYMBOLIC=90,
1527                MATOP_MAT_MULT_NUMERIC=91,
1528                MATOP_PTAP=92,
1529                MATOP_PTAP_SYMBOLIC=93,
1530                MATOP_PTAP_NUMERIC=94,
1531                MATOP_MAT_MULTTRANSPOSE=95,
1532                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1533                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1534                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1535                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1536                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1537                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1538                MATOP_CONJUGATE=102,
1539                MATOP_SET_SIZES=103,
1540                MATOP_SET_VALUES_ROW=104,
1541                MATOP_REAL_PART=105,
1542                MATOP_IMAG_PART=106,
1543                MATOP_GET_ROW_UTRIANGULAR=107,
1544                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1545                MATOP_MATSOLVE=109,
1546                MATOP_GET_REDUNDANTMATRIX=110,
1547                MATOP_GET_ROW_MIN=111,
1548                MATOP_GET_COLUMN_VEC=112,
1549                MATOP_MISSING_DIAGONAL=113,
1550                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1551                MATOP_CREATE=115,
1552                MATOP_GET_GHOSTS=116,
1553                MATOP_GET_LOCALSUBMATRIX=117,
1554                MATOP_RESTORE_LOCALSUBMATRIX=118,
1555                MATOP_MULT_DIAGONAL_BLOCK=119,
1556                MATOP_HERMITIANTRANSPOSE=120,
1557                MATOP_MULTHERMITIANTRANSPOSE=121,
1558                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1559                MATOP_GETMULTIPROCBLOCK=123,
1560 	       MATOP_GET_SUBMATRICES_PARALLEL=128 /* NOTE(review): slots 124-127 are skipped -- presumably reserved; confirm (and update finclude/petscmat.h) before adding entries */
1561              } MatOperation;
1562 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1563 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1564 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1565 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1566 
1567 /*
1568    Codes for matrices stored on disk. By default they are
1569    stored in a universal format. By changing the format with
1570    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1571    be stored in a way natural for the matrix, for example dense matrices
1572    would be stored as dense. Matrices stored this way may only be
1573    read into matrices of the same type.
1574 */
1575 #define MATRIX_BINARY_FORMAT_DENSE -1
1576 
1577 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1578 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1579 
1580 /*S
1581      MatNullSpace - Object that removes a null space from a vector, i.e.
1582         orthogonalizes the vector to a subspace
1583 
1584    Level: advanced
1585 
1586   Concepts: matrix; linear operator, null space
1587 
1588   Users manual sections:
1589 .   sec_singular
1590 
1591 .seealso:  MatNullSpaceCreate()
1592 S*/
1593 typedef struct _p_MatNullSpace* MatNullSpace;
1594 
1595 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1596 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1597 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace);
1598 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1599 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1600 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1601 
1602 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1603 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1604 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1605 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1606 
1607 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1608 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1609 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1610 
1611 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1612 
1613 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1614 
1615 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1616 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1617 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1618 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1619 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1620 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1621 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1622 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1623 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1624 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1625 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1626 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1627 extern PetscErrorCode  MatMFFDSetFromOptions(Mat);
1628 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1629 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1630 
1631 /*S
1632     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1633               Jacobian vector products
1634 
1635     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1636 
1637            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1638 
1639     Level: developer
1640 
1641 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1642 S*/
1643 typedef struct _p_MatMFFD* MatMFFD;
1644 
1645 /*E
1646     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1647 
1648    Level: beginner
1649 
1650 .seealso: MatMFFDSetType(), MatMFFDRegister()
1651 E*/
1652 #define MatMFFDType char*
1653 #define MATMFFD_DS  "ds"
1654 #define MATMFFD_WP  "wp"
1655 
1656 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1657 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1658 
1659 /*MC
1660    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1661 
1662    Synopsis:
1663    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1664 
1665    Not Collective
1666 
1667    Input Parameters:
1668 +  name_solver - name of a new user-defined compute-h module
1669 .  path - path (either absolute or relative) the library containing this solver
1670 .  name_create - name of routine to create method context
1671 -  routine_create - routine to create method context
1672 
1673    Level: developer
1674 
1675    Notes:
1676    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1677 
1678    If dynamic libraries are used, then the fourth input argument (routine_create)
1679    is ignored.
1680 
1681    Sample usage:
1682 .vb
1683    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1684                "MyHCreate",MyHCreate);
1685 .ve
1686 
1687    Then, your solver can be chosen with the procedural interface via
1688 $     MatMFFDSetType(mfctx,"my_h")
1689    or at runtime via the option
1690 $     -snes_mf_type my_h
1691 
1692 .keywords: MatMFFD, register
1693 
1694 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1695 M*/
1696 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1697 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) /* drop the function pointer (d); the routine is loaded dynamically by name (c) from library path (b) */
1698 #else
1699 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) /* static build: pass the function pointer straight through */
1700 #endif
1701 
1702 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1703 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1704 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1705 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1706 
1707 
1708 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1709 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1710 
1711 /*
1712    PETSc interface to MUMPS
1713 */
1714 #ifdef PETSC_HAVE_MUMPS
1715 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1716 #endif
1717 
1718 /*
1719    PETSc interface to SUPERLU
1720 */
1721 #ifdef PETSC_HAVE_SUPERLU
1722 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1723 #endif
1724 
1725 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1726 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1727 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1728 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1729 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1730 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1731 
1732 PETSC_EXTERN_CXX_END
1733 #endif
1734