xref: /petsc/include/petscmat.h (revision 7d0a6c19129e7069c8a40e210b34ed62989173db)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
/*S
     Mat - Abstract PETSc matrix object

   Level: beginner

  Concepts: matrix; linear operator

.seealso:  MatCreate(), MatType, MatSetType()
S*/
typedef struct _p_Mat*           Mat;  /* opaque handle; struct _p_Mat is defined in PETSc's private implementation headers */
19 
/*E
    MatType - String with the name of a PETSc matrix or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()

   Level: beginner

.seealso: MatSetType(), Mat, MatSolverPackage
E*/
#define MatType char*
/* Built-in matrix type name strings, passed to MatSetType()/MatConvert().
   Most formats come in three flavors: a generic name that dispatches on the
   communicator size, plus explicit sequential ("seq...") and parallel ("mpi...") variants. */
#define MATSAME            "same"
#define MATMAIJ            "maij"
#define MATSEQMAIJ           "seqmaij"
#define MATMPIMAIJ           "mpimaij"
#define MATIS              "is"
/* AIJ (compressed sparse row) and its specialized storage variants */
#define MATAIJ             "aij"
#define MATSEQAIJ            "seqaij"
#define MATMPIAIJ            "mpiaij"
#define MATAIJCRL              "aijcrl"
#define MATSEQAIJCRL             "seqaijcrl"
#define MATMPIAIJCRL             "mpiaijcrl"
#define MATAIJCUSP             "aijcusp"
#define MATSEQAIJCUSP            "seqaijcusp"
#define MATMPIAIJCUSP            "mpiaijcusp"
#define MATAIJPERM             "aijperm"
#define MATSEQAIJPERM            "seqaijperm"
#define MATMPIAIJPERM            "mpiaijperm"
#define MATSHELL           "shell"
/* dense and (symmetric) block formats */
#define MATDENSE           "dense"
#define MATSEQDENSE          "seqdense"
#define MATMPIDENSE          "mpidense"
#define MATBAIJ            "baij"
#define MATSEQBAIJ           "seqbaij"
#define MATMPIBAIJ           "mpibaij"
#define MATMPIADJ          "mpiadj"
#define MATSBAIJ           "sbaij"
#define MATSEQSBAIJ          "seqsbaij"
#define MATMPISBAIJ          "mpisbaij"
/* special-purpose / wrapper matrix types */
#define MATDAAD            "daad"
#define MATMFFD            "mffd"
#define MATNORMAL          "normal"
#define MATLRC             "lrc"
#define MATSCATTER         "scatter"
#define MATBLOCKMAT        "blockmat"
#define MATCOMPOSITE       "composite"
#define MATFFT             "fft"
#define MATFFTW              "fftw"
#define MATSEQCUFFT          "seqcufft"
#define MATTRANSPOSEMAT    "transpose"
#define MATSCHURCOMPLEMENT "schurcomplement"
#define MATPYTHON          "python"
#define MATHYPRESTRUCT     "hyprestruct"
#define MATHYPRESSTRUCT    "hypresstruct"
#define MATSUBMATRIX       "submatrix"
#define MATLOCALREF        "localref"
#define MATNEST            "nest"
76 
/*E
    MatSolverPackage - String with the name of a PETSc matrix solver type.

    For example: "petsc" indicates what PETSc provides, "superlu" indicates either
       SuperLU or SuperLU_Dist etc.


   Level: beginner

.seealso: MatGetFactor(), Mat, MatSetType(), MatType
E*/
#define MatSolverPackage char*
/* Solver-package name strings passed to MatGetFactor(). Except for "petsc",
   these name external packages that PETSc must have been configured with
   for the corresponding factorization to be available. */
#define MATSOLVERSPOOLES      "spooles"
#define MATSOLVERSUPERLU      "superlu"
#define MATSOLVERSUPERLU_DIST "superlu_dist"
#define MATSOLVERUMFPACK      "umfpack"
#define MATSOLVERCHOLMOD      "cholmod"
#define MATSOLVERESSL         "essl"
#define MATSOLVERLUSOL        "lusol"
#define MATSOLVERMUMPS        "mumps"
#define MATSOLVERPASTIX       "pastix"
#define MATSOLVERDSCPACK      "dscpack"
#define MATSOLVERMATLAB       "matlab"
#define MATSOLVERPETSC        "petsc"
#define MATSOLVERPLAPACK      "plapack"
#define MATSOLVERBAS          "bas"
103 
/*E
    MatFactorType - indicates what type of factorization is requested

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatSolverPackage, MatGetFactor()
E*/
typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
extern const char *const MatFactorTypes[];  /* printable names matching the MatFactorType values */

/* Obtain (or query the availability of) a factor matrix of the given type from the named solver package */
extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
120 
121 /* Logging support */
122 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
123 extern PetscClassId  MAT_CLASSID;
124 extern PetscClassId  MAT_FDCOLORING_CLASSID;
125 extern PetscClassId  MAT_PARTITIONING_CLASSID;
126 extern PetscClassId  MAT_NULLSPACE_CLASSID;
127 extern PetscClassId  MATMFFD_CLASSID;
128 
/*E
    MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
     or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
     that the input matrix is to be replaced with the converted matrix.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
E*/
typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
141 
/*E
    MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
     include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().

    Level: beginner

.seealso: MatGetSeqNonzeroStructure()
E*/
typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
151 
152 extern PetscErrorCode  MatInitializePackage(const char[]);
153 
154 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
155 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
156 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
157 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
158 extern PetscErrorCode  MatSetType(Mat,const MatType);
159 extern PetscErrorCode  MatSetFromOptions(Mat);
160 extern PetscErrorCode  MatSetUpPreallocation(Mat);
161 extern PetscErrorCode  MatRegisterAll(const char[]);
162 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
163 
164 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
165 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
166 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
167 
/*MC
   MatRegisterDynamic - Adds a new matrix type

   Synopsis:
   PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))

   Not Collective

   Input Parameters:
+  name - name of a new user-defined matrix type
.  path - path (either absolute or relative) of the library containing this solver
.  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Notes:
   MatRegisterDynamic() may be called multiple times to add several user-defined solvers.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.

   Sample usage:
.vb
   MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyMatCreate",MyMatCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     MatSetType(Mat,"my_mat")
   or at runtime via the option
$     -mat_type my_mat

   Level: advanced

   Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
         If your function is not being put into a shared library then use MatRegister() instead

.keywords: Mat, register

.seealso: MatRegisterAll(), MatRegisterDestroy()

M*/
/* With dynamic libraries the creation routine is looked up by name at runtime,
   so the function pointer (argument d) is deliberately dropped. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
#else
#define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
#endif
214 
215 extern PetscBool  MatRegisterAllCalled;
216 extern PetscFList MatList;
217 extern PetscFList MatColoringList;
218 extern PetscFList MatPartitioningList;
219 
/*E
    MatStructure - Indicates if the matrix has the same nonzero structure

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
E*/
/* "same"/"different"/"subset" are relative to the matrix previously supplied to the operation */
typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;
230 
231 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
232 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
233 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
234 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
235 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
236 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
237 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
238 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
239 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
240 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
241 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
242 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
243 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
245 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
246 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
247 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
248 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
249 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
252 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
253 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
255 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
256 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
257 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
258 
259 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
260 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
265 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
266 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
267 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
268 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
269 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
271 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
272 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
273 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
274 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
275 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
278 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
279 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
281 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
282 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
283 
284 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
285 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
286 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
291 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
292 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
293 
294 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
295 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
296 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
299 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
300 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
301 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
302 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
305 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
306 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
308 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
309 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
310 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
311 
312 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
313 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
315 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
316 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
317 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
318 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
319 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
320 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
321 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
322 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
323 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
324 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
325 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
326 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
327 extern PetscErrorCode  MatCompositeMerge(Mat);
328 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
329 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
330 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
331 
332 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
333 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
336 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
337 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
338 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
339 
340 extern PetscErrorCode  MatCreatePython(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const char[],Mat*);
341 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
342 
extern PetscErrorCode  MatSetUp(Mat);
extern PetscErrorCode  MatDestroy_(Mat);
/* MatDestroy() destroys the matrix and zeroes the caller's handle; relies on
   short-circuit || so the handle is nulled only when MatDestroy_() returns 0.
   NOTE: the argument is expanded twice -- pass a plain lvalue with no side effects. */
#define MatDestroy(a)  (MatDestroy_(a) || (((a) = 0),0))
346 
347 extern PetscErrorCode  MatConjugate(Mat);
348 extern PetscErrorCode  MatRealPart(Mat);
349 extern PetscErrorCode  MatImaginaryPart(Mat);
350 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
351 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
352 
353 /* ------------------------------------------------------------*/
354 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
355 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
356 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
357 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
358 
/*S
     MatStencil - Data structure (C struct) for storing information about a single row or
        column of a matrix as an index on an associated grid.

   Level: beginner

  Concepts: matrix; linear operator

.seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
S*/
typedef struct {
  PetscInt k,j,i,c;  /* grid indices (k,j,i) and degree-of-freedom component c at that grid point -- see MatSetValuesStencil() for which directions k/j/i index */
} MatStencil;
372 
373 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
374 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
375 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
376 
377 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
378 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
379 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
380 
/*E
    MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
     to continue to add values to it

    Level: beginner

.seealso: MatAssemblyBegin(), MatAssemblyEnd()
E*/
/* NOTE(review): the explicit values (FINAL=0, FLUSH=1) look intentional, presumably
   frozen for the Fortran/binary interface -- confirm before changing */
typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
390 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
391 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
392 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
393 
394 
395 
/*E
    MatOption - Options that may be set for a matrix and its behavior or storage

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatSetOption()
E*/
typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
              MAT_SYMMETRIC,
              MAT_STRUCTURALLY_SYMMETRIC,
              MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
              MAT_NEW_NONZERO_LOCATION_ERR,
              MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
              MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
              MAT_USE_INODES,
              MAT_HERMITIAN,
              MAT_SYMMETRY_ETERNAL,
              MAT_CHECK_COMPRESSED_ROW,
              MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
              MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
              MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
              NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS counts the entries and must stay last */
extern const char *MatOptions[];  /* printable names matching the MatOption values */
421 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
422 extern PetscErrorCode  MatGetType(Mat,const MatType*);
423 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
424 
425 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
426 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
427 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
428 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
429 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
430 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
431 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
432 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
433 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
434 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
435 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
436 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
437 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
438 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
439 
440 
441 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
442 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
443 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
444 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
445 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
446 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
447 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
448 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
449 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
450 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
451 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
452 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
453 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
454 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
455 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
456 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
457 
/*E
    MatDuplicateOption - Indicates if a duplicated sparse matrix should have
  its numerical values copied over or just its nonzero structure.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix;
$                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
$                               have several matrices with the same nonzero pattern.

.seealso: MatDuplicate()
E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
473 
474 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
475 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
476 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
477 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
478 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
479 
480 
481 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
482 extern PetscErrorCode  MatView(Mat,PetscViewer);
483 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
484 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
485 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
486 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
487 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
488 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
489 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
490 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
491 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
492 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
493 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
494 
495 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
496 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
497 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
498 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
499 
500 /*S
501      MatInfo - Context of matrix information, used with MatGetInfo()
502 
503    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
504 
505    Level: intermediate
506 
507   Concepts: matrix^nonzero information
508 
509 .seealso:  MatGetInfo(), MatInfoType
510 S*/
typedef struct {
  /* every field is PetscLogDouble so this struct maps onto a plain
     double-precision array of dimension MAT_INFO_SIZE in Fortran */
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
520 
521 /*E
522     MatInfoType - Indicates if you want information about the local part of the matrix,
523      the entire parallel matrix or the maximum over all the local parts.
524 
525     Level: beginner
526 
527    Any additions/changes here MUST also be made in include/finclude/petscmat.h
528 
529 .seealso: MatGetInfo(), MatInfo
530 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; /* explicit values must stay in sync with include/finclude/petscmat.h */
532 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
533 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
534 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
535 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
536 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
537 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
538 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
539 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
540 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
541 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
542 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
543 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
544 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
545 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
546 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
547 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
548 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
549 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
550 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
551 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
552 
553 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
554 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
555 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
556 extern PetscErrorCode  MatZeroEntries(Mat);
557 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
558 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
559 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
560 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
561 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
562 
563 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
564 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
565 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
566 
567 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
568 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
569 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
570 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
571 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
572 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
573 
574 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
575 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
576 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
577 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
578 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
579 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
580 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
581 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
582 
583 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
584 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
585 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
586 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
587 extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
588 extern PetscErrorCode  MatGetLocalMat(Mat,MatReuse,Mat*);
589 extern PetscErrorCode  MatGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
590 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
591 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
592 #if defined (PETSC_USE_CTABLE)
593 #include "petscctable.h"
594 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
595 #else
596 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
597 #endif
598 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
599 
600 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
601 
602 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
603 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
604 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
605 
606 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
607 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
608 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
609 
610 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
611 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
612 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
613 
614 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
615 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
616 
617 extern PetscErrorCode  MatScale(Mat,PetscScalar);
618 extern PetscErrorCode  MatShift(Mat,PetscScalar);
619 
620 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
621 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
622 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
623 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
624 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
625 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
626 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
627 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
628 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
629 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
630 
631 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
632 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
633 
634 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
635 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
636 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
637 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
638 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
639 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
640 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
641 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
642 
643 /*MC
644    MatSetValue - Set a single entry into a matrix.
645 
646    Not collective
647 
648    Input Parameters:
649 +  m - the matrix
650 .  row - the row location of the entry
651 .  col - the column location of the entry
652 .  value - the value to insert
653 -  mode - either INSERT_VALUES or ADD_VALUES
654 
655    Notes:
656    For efficiency one should use MatSetValues() and set several or many
657    values simultaneously if possible.
658 
659    Level: beginner
660 
661 .seealso: MatSetValues(), MatSetValueLocal()
662 M*/
663 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
664 
665 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
666 
667 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
668 
669 /*MC
670    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
671        row in a matrix providing the data that one can use to correctly preallocate the matrix.
672 
673    Synopsis:
674    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
675 
676    Collective on MPI_Comm
677 
678    Input Parameters:
679 +  comm - the communicator that will share the eventually allocated matrix
680 .  nrows - the number of LOCAL rows in the matrix
681 -  ncols - the number of LOCAL columns in the matrix
682 
683    Output Parameters:
684 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
686 
687 
688    Level: intermediate
689 
690    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
692 
693    Do not malloc or free dnz and onz, that is handled internally by these routines
694 
695    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
696 
697    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
698 
699   Concepts: preallocation^Matrix
700 
701 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
702           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
703 M*/
/* Allocates and zeroes the dnz[]/onz[] count arrays, then uses MPI_Scan()
   prefix sums to compute this process's row start (__rstart) and column
   ownership range [__start,__end).  Deliberately leaves one '{' open; it is
   closed by MatPreallocateFinalize(). */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
712 
713 /*MC
714    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
715        row in a matrix providing the data that one can use to correctly preallocate the matrix.
716 
717    Synopsis:
718    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
719 
720    Collective on MPI_Comm
721 
722    Input Parameters:
723 +  comm - the communicator that will share the eventually allocated matrix
724 .  nrows - the number of LOCAL rows in the matrix
725 -  ncols - the number of LOCAL columns in the matrix
726 
727    Output Parameters:
728 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
730 
731 
732    Level: intermediate
733 
734    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
736 
737    Do not malloc or free dnz and onz, that is handled internally by these routines
738 
739    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
740 
741   Concepts: preallocation^Matrix
742 
743 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
744           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
745 M*/
/* Symmetric (SBAIJ) variant of MatPreallocateInitialize(): allocates/zeroes
   dnz[]/onz[] and computes __end and __rstart via MPI_Scan().  __start is not
   declared or computed because MatPreallocateSymmetricSet() never uses it.
   Leaves one '{' open; closed by MatPreallocateFinalize(). */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
754 
755 /*MC
756    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
757        inserted using a local number of the rows and columns
758 
759    Synopsis:
760    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
761 
762    Not Collective
763 
764    Input Parameters:
765 +  map - the row mapping from local numbering to global numbering
766 .  nrows - the number of rows indicated
767 .  rows - the indices of the rows
768 .  cmap - the column mapping from local to global numbering
769 .  ncols - the number of columns in the matrix
770 .  cols - the columns indicated
771 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
773 
774 
775    Level: intermediate
776 
777    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
779 
780    Do not malloc or free dnz and onz, that is handled internally by these routines
781 
782   Concepts: preallocation^Matrix
783 
784 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
785           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
786 M*/
/* NOTE: maps rows[] and cols[] from local to global numbering IN PLACE (the
   caller's arrays are overwritten), then counts each row via
   MatPreallocateSet().  Relies on _4_ierr declared by MatPreallocateInitialize(). */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
796 
797 /*MC
798    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
799        inserted using a local number of the rows and columns
800 
801    Synopsis:
802    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
803 
804    Not Collective
805 
806    Input Parameters:
807 +  map - the mapping between local numbering and global numbering
808 .  nrows - the number of rows indicated
809 .  rows - the indices of the rows
810 .  ncols - the number of columns in the matrix
811 .  cols - the columns indicated
812 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
814 
815 
816    Level: intermediate
817 
818    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
820 
821    Do not malloc or free dnz and onz that is handled internally by these routines
822 
823   Concepts: preallocation^Matrix
824 
825 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
826           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
827 M*/
/* Symmetric variant of MatPreallocateSetLocal(): a single mapping 'map' is
   applied (IN PLACE, overwriting the caller's arrays) to both rows[] and
   cols[], then each row is counted via MatPreallocateSymmetricSet(). */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
837 
838 /*MC
839    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
840        inserted using a local number of the rows and columns
841 
842    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row, PetscInt ncols, PetscInt *cols, PetscInt *dnz, PetscInt *onz)
844 
845    Not Collective
846 
847    Input Parameters:
848 +  row - the row
849 .  ncols - the number of columns in the matrix
850 -  cols - the columns indicated
851 
852    Output Parameters:
853 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
855 
856 
857    Level: intermediate
858 
859    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
861 
862    Do not malloc or free dnz and onz that is handled internally by these routines
863 
864    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
865 
866   Concepts: preallocation^Matrix
867 
868 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
869           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
870 M*/
/* Counts global row 'row' with global columns cols[0..nc-1]: columns inside
   this process's ownership range [__start,__end) increment dnz[], all others
   increment onz[].  Uses __rstart/__start/__end/__nrows declared by
   MatPreallocateInitialize().  All macro arguments are parenthesized so that
   expressions (e.g. i+1) may be passed safely, matching the (rows)[__l]
   convention used in MatPreallocateSetLocal(). */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if ((row) < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",(row),__rstart);\
  if ((row) >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",(row),__rstart+__nrows-1);\
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else (dnz)[(row) - __rstart]++;\
  }\
}
880 
881 /*MC
882    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
883        inserted using a local number of the rows and columns
884 
885    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row, PetscInt ncols, PetscInt *cols, PetscInt *dnz, PetscInt *onz)
887 
888    Not Collective
889 
890    Input Parameters:
891 +  nrows - the number of rows indicated
892 .  rows - the indices of the rows
893 .  ncols - the number of columns in the matrix
894 .  cols - the columns indicated
895 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
897 
898 
899    Level: intermediate
900 
901    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
903 
904    Do not malloc or free dnz and onz that is handled internally by these routines
905 
906    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
907 
908   Concepts: preallocation^Matrix
909 
910 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
911           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
912 M*/
/* Symmetric variant of MatPreallocateSet(): only upper-triangular entries
   (cols[__i] >= row) are counted in dnz[]; columns at or beyond __end go to
   onz[].  Uses __rstart/__end declared by MatPreallocateSymmetricInitialize().
   All macro arguments are parenthesized so that expressions may be passed
   safely, consistent with MatPreallocateSet(). */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else if ((cols)[__i] >= (row)) (dnz)[(row) - __rstart]++;\
  }\
}
920 
921 /*MC
922    MatPreallocateLocation -  An alternative to MatPreallocationSet() that puts the nonzero locations into the matrix if it exists
923 
924    Synopsis:
925    PetscErrorCode MatPreallocateLocations(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
926 
927    Not Collective
928 
929    Input Parameters:
930 .  A - matrix
931 .  row - row where values exist (must be local to this process)
932 .  ncols - number of columns
933 .  cols - columns with nonzeros
934 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
936 
937 
938    Level: intermediate
939 
940    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
942 
943    Do not malloc or free dnz and onz that is handled internally by these routines
944 
945    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
946 
947   Concepts: preallocation^Matrix
948 
949 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
950           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
951 M*/
/* If the matrix A already exists, insert the (row,cols) locations directly
   (with PETSC_NULL values); otherwise fall back to counting with
   MatPreallocateSet().  NOTE: unlike the other macros in this family it uses
   the caller's 'ierr', not the _4_ierr declared by MatPreallocateInitialize(). */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
953 
954 
955 /*MC
956    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
957        row in a matrix providing the data that one can use to correctly preallocate the matrix.
958 
959    Synopsis:
960    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
961 
962    Collective on MPI_Comm
963 
964    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
967 
968 
969    Level: intermediate
970 
971    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
973 
974    Do not malloc or free dnz and onz that is handled internally by these routines
975 
976    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
977 
978   Concepts: preallocation^Matrix
979 
980 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
981           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
982 M*/
/* Frees dnz/onz (allocated together by PetscMalloc2) and closes the '{'
   opened by MatPreallocateInitialize()/MatPreallocateSymmetricInitialize(). */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
984 
985 
986 
987 /* Routines unique to particular data structures */
988 extern PetscErrorCode  MatShellGetContext(Mat,void **);
989 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
990 
991 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
992 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
993 
994 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
995 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
996 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
997 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
998 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
999 
#define MAT_SKIP_ALLOCATION -4  /* sentinel preallocation value -- NOTE(review): exact semantics are defined by the *SetPreallocation() implementations; confirm there */
1001 
1002 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1003 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1004 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1005 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1006 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1007 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1008 
1009 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1010 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1011 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1012 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1013 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1014 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1015 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1016 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1017 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1018 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1019 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1020 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1021 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1022 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1023 
1024 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1025 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1026 
1027 extern PetscErrorCode  MatStoreValues(Mat);
1028 extern PetscErrorCode  MatRetrieveValues(Mat);
1029 
1030 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1031 
1032 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1033 /*
1034   These routines are not usually accessed directly, rather solving is
1035   done through the KSP and PC interfaces.
1036 */
1037 
1038 /*E
1039     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1040        with an optional dynamic library name, for example
1041        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1042 
1043    Level: beginner
1044 
1045    Cannot use const because the PC objects manipulate the string
1046 
1047 .seealso: MatGetOrdering()
1048 E*/
#define MatOrderingType char*
/* built-in ordering names accepted by MatGetOrdering() */
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1060 
1061 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1062 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1063 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1064 
1065 /*MC
1066    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1067 
1068    Synopsis:
1069    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1070 
1071    Not Collective
1072 
1073    Input Parameters:
1074 +  sname - name of ordering (for example MATORDERINGND)
1075 .  path - location of library where creation routine is
1076 .  name - name of function that creates the ordering type,a string
1077 -  function - function pointer that creates the ordering
1078 
1079    Level: developer
1080 
1081    If dynamic libraries are used, then the fourth input argument (function)
1082    is ignored.
1083 
1084    Sample usage:
1085 .vb
1086    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1087                "MyOrder",MyOrder);
1088 .ve
1089 
1090    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1092    or at runtime via the option
1093 $     -pc_factor_mat_ordering_type my_order
1094 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1096 
1097 .keywords: matrix, ordering, register
1098 
1099 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1100 M*/
/* With dynamic libraries the creation-function pointer is dropped (passed as 0)
   and the routine is located by name at runtime; otherwise the pointer is
   registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1106 
1107 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1108 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1109 extern PetscBool  MatOrderingRegisterAllCalled;
1110 extern PetscFList MatOrderingList;
1111 
1112 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1113 
1114 /*S
1115     MatFactorShiftType - Numeric Shift.
1116 
1117    Level: beginner
1118 
1119 S*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];  /* printable names for the enum values; defined in the library */
1122 
/*S
   MatFactorInfo - Data passed into the matrix factorization routines

   In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
$     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

   Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.

      You can use MatFactorInfoInitialize() to set default values.

   Level: developer

.seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
          MatFactorInfoInitialize()

S*/
/* All fields are PetscReal (even logically integer/boolean ones) so the struct maps onto a plain
   double precision array in Fortran; see the manual page above. */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero => use drop-tolerance (dt) based factorization -- NOTE(review): confirm semantics against ILU callers */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
  PetscReal     shiftamount;     /* how large the shift is */
} MatFactorInfo;

/* ---- Factorization: in-place variants, then symbolic-analysis + numeric-factorization pairs ---- */
extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);   /* fills a MatFactorInfo with default values */
extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);

/* ---- Triangular solves with a previously factored matrix ---- */
extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);      /* solve with several right-hand sides at once */

extern PetscErrorCode  MatSetUnfactored(Mat);         /* resets the matrix to its unfactored state */
1175 
/*E
    MatSORType - What type of (S)SOR to perform

    Level: beginner

   May be bitwise ORd together

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

   MatSORType may be bitwise ORd together, so do not change the numbers

.seealso: MatSOR()
E*/
/* Values are powers of two (or unions thereof) so they can be ORd; SYMMETRIC = FORWARD|BACKWARD */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
/* (S)SOR relaxation: args are matrix, rhs, omega, sweep type, diagonal shift, iterations, local iterations, solution */
extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);

/*
    These routines are for efficiently computing Jacobians via finite differences.
*/

/*E
    MatColoringType - String with the name of a PETSc matrix coloring or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

   Level: beginner

.seealso: MatGetColoring()
E*/
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural"
#define MATCOLORINGSL      "sl"
#define MATCOLORINGLF      "lf"
#define MATCOLORINGID      "id"

extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1216 
1217 /*MC
1218    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1219                                matrix package.
1220 
1221    Synopsis:
1222    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1223 
1224    Not Collective
1225 
1226    Input Parameters:
1227 +  sname - name of Coloring (for example MATCOLORINGSL)
1228 .  path - location of library where creation routine is
1229 .  name - name of function that creates the Coloring type, a string
1230 -  function - function pointer that creates the coloring
1231 
1232    Level: developer
1233 
1234    If dynamic libraries are used, then the fourth input argument (function)
1235    is ignored.
1236 
1237    Sample usage:
1238 .vb
1239    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1240                "MyColor",MyColor);
1241 .ve
1242 
   Then, your coloring routine can be chosen with the procedural interface via
1244 $     MatColoringSetType(part,"my_color")
1245    or at runtime via the option
1246 $     -mat_coloring_type my_color
1247 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1249 
1250 .keywords: matrix, Coloring, register
1251 
1252 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1253 M*/
/* With dynamic libraries the creation-function pointer (argument d) is dropped and the
   routine is looked up by its string name (argument c) at runtime instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif

extern PetscBool  MatColoringRegisterAllCalled;     /* set once MatColoringRegisterAll() has run */

extern PetscErrorCode  MatColoringRegisterAll(const char[]);    /* registers all built-in colorings */
extern PetscErrorCode  MatColoringRegisterDestroy(void);        /* frees the list of registered colorings */
extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1265 
/*S
     MatFDColoring - Object for computing a sparse Jacobian via finite differences
        and coloring

   Level: beginner

  Concepts: coloring, sparse Jacobian, finite differences

.seealso:  MatFDColoringCreate()
S*/
typedef struct _p_MatFDColoring* MatFDColoring;

extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring);
extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
/* The function whose Jacobian is being approximated; passed as a generic pointer and cast internally */
extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);  /* differencing parameters (error_rel, umin) -- TODO confirm order */
extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);    /* computes the Jacobian into the Mat */
extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
/*
    These routines are for partitioning matrices: currently used only
  for adjacency matrix, MatCreateMPIAdj().
*/

/*S
     MatPartitioning - Object for managing the partitioning of a matrix or graph

   Level: beginner

  Concepts: partitioning

.seealso:  MatPartitioningCreate(), MatPartitioningType
S*/
typedef struct _p_MatPartitioning* MatPartitioning;
1304 
/*E
    MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioning
E*/
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGJOSTLE   "jostle"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGSCOTCH   "scotch"


/* Typical usage: Create, SetAdjacency, (optional weights/nparts), Apply to obtain an IS, Destroy */
extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);    /* result IS maps local rows to partition numbers */
extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning);

extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1334 
1335 /*MC
1336    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1337    matrix package.
1338 
1339    Synopsis:
1340    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1341 
1342    Not Collective
1343 
1344    Input Parameters:
1345 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1346 .  path - location of library where creation routine is
1347 .  name - name of function that creates the partitioning type, a string
1348 -  function - function pointer that creates the partitioning type
1349 
1350    Level: developer
1351 
1352    If dynamic libraries are used, then the fourth input argument (function)
1353    is ignored.
1354 
1355    Sample usage:
1356 .vb
1357    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1358                "MyPartCreate",MyPartCreate);
1359 .ve
1360 
1361    Then, your partitioner can be chosen with the procedural interface via
1362 $     MatPartitioningSetType(part,"my_part")
1363    or at runtime via the option
1364 $     -mat_partitioning_type my_part
1365 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1367 
1368 .keywords: matrix, partitioning, register
1369 
1370 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1371 M*/
/* With dynamic libraries the creation-function pointer (argument d) is dropped and the
   routine is looked up by its string name (argument c) at runtime instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif

extern PetscBool  MatPartitioningRegisterAllCalled;   /* set once MatPartitioningRegisterAll() has run */

extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
extern PetscErrorCode  MatPartitioningRegisterDestroy(void);

extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);

/* ---- ParMETIS-specific options ---- */
extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);

/* ---- Jostle-specific options ---- */
extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);

/* ---- Chaco-specific options: global method, local refinement, eigensolver settings ---- */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);

/* ---- Party-specific options: method names passed as short strings ---- */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );

/* ---- Scotch-specific options ---- */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);

/* Build vertex/cell adjacency graphs from a mesh (used with ParMETIS-based partitioning) */
extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1433 
1434 /*
1435     If you add entries here you must also add them to finclude/petscmat.h
1436 */
1437 typedef enum { MATOP_SET_VALUES=0,
1438                MATOP_GET_ROW=1,
1439                MATOP_RESTORE_ROW=2,
1440                MATOP_MULT=3,
1441                MATOP_MULT_ADD=4,
1442                MATOP_MULT_TRANSPOSE=5,
1443                MATOP_MULT_TRANSPOSE_ADD=6,
1444                MATOP_SOLVE=7,
1445                MATOP_SOLVE_ADD=8,
1446                MATOP_SOLVE_TRANSPOSE=9,
1447                MATOP_SOLVE_TRANSPOSE_ADD=10,
1448                MATOP_LUFACTOR=11,
1449                MATOP_CHOLESKYFACTOR=12,
1450                MATOP_SOR=13,
1451                MATOP_TRANSPOSE=14,
1452                MATOP_GETINFO=15,
1453                MATOP_EQUAL=16,
1454                MATOP_GET_DIAGONAL=17,
1455                MATOP_DIAGONAL_SCALE=18,
1456                MATOP_NORM=19,
1457                MATOP_ASSEMBLY_BEGIN=20,
1458                MATOP_ASSEMBLY_END=21,
1459                MATOP_SET_OPTION=22,
1460                MATOP_ZERO_ENTRIES=23,
1461                MATOP_ZERO_ROWS=24,
1462                MATOP_LUFACTOR_SYMBOLIC=25,
1463                MATOP_LUFACTOR_NUMERIC=26,
1464                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1465                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1466                MATOP_SETUP_PREALLOCATION=29,
1467                MATOP_ILUFACTOR_SYMBOLIC=30,
1468                MATOP_ICCFACTOR_SYMBOLIC=31,
1469                MATOP_GET_ARRAY=32,
1470                MATOP_RESTORE_ARRAY=33,
1471                MATOP_DUPLICATE=34,
1472                MATOP_FORWARD_SOLVE=35,
1473                MATOP_BACKWARD_SOLVE=36,
1474                MATOP_ILUFACTOR=37,
1475                MATOP_ICCFACTOR=38,
1476                MATOP_AXPY=39,
1477                MATOP_GET_SUBMATRICES=40,
1478                MATOP_INCREASE_OVERLAP=41,
1479                MATOP_GET_VALUES=42,
1480                MATOP_COPY=43,
1481                MATOP_GET_ROW_MAX=44,
1482                MATOP_SCALE=45,
1483                MATOP_SHIFT=46,
1484                MATOP_DIAGONAL_SET=47,
1485                MATOP_ILUDT_FACTOR=48,
1486                MATOP_SET_BLOCK_SIZE=49,
1487                MATOP_GET_ROW_IJ=50,
1488                MATOP_RESTORE_ROW_IJ=51,
1489                MATOP_GET_COLUMN_IJ=52,
1490                MATOP_RESTORE_COLUMN_IJ=53,
1491                MATOP_FDCOLORING_CREATE=54,
1492                MATOP_COLORING_PATCH=55,
1493                MATOP_SET_UNFACTORED=56,
1494                MATOP_PERMUTE=57,
1495                MATOP_SET_VALUES_BLOCKED=58,
1496                MATOP_GET_SUBMATRIX=59,
1497                MATOP_DESTROY=60,
1498                MATOP_VIEW=61,
1499                MATOP_CONVERT_FROM=62,
1500                MATOP_USE_SCALED_FORM=63,
1501                MATOP_SCALE_SYSTEM=64,
1502                MATOP_UNSCALE_SYSTEM=65,
1503                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1504                MATOP_SET_VALUES_LOCAL=67,
1505                MATOP_ZERO_ROWS_LOCAL=68,
1506                MATOP_GET_ROW_MAX_ABS=69,
1507                MATOP_GET_ROW_MIN_ABS=70,
1508                MATOP_CONVERT=71,
1509                MATOP_SET_COLORING=72,
1510                MATOP_SET_VALUES_ADIC=73,
1511                MATOP_SET_VALUES_ADIFOR=74,
1512                MATOP_FD_COLORING_APPLY=75,
1513                MATOP_SET_FROM_OPTIONS=76,
1514                MATOP_MULT_CON=77,
1515                MATOP_MULT_TRANSPOSE_CON=78,
1516                MATOP_PERMUTE_SPARSIFY=79,
1517                MATOP_MULT_MULTIPLE=80,
1518                MATOP_SOLVE_MULTIPLE=81,
1519                MATOP_GET_INERTIA=82,
1520                MATOP_LOAD=83,
1521                MATOP_IS_SYMMETRIC=84,
1522                MATOP_IS_HERMITIAN=85,
1523                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1524                MATOP_DUMMY=87,
1525                MATOP_GET_VECS=88,
1526                MATOP_MAT_MULT=89,
1527                MATOP_MAT_MULT_SYMBOLIC=90,
1528                MATOP_MAT_MULT_NUMERIC=91,
1529                MATOP_PTAP=92,
1530                MATOP_PTAP_SYMBOLIC=93,
1531                MATOP_PTAP_NUMERIC=94,
1532                MATOP_MAT_MULTTRANSPOSE=95,
1533                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1534                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1535                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1536                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1537                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1538                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1539                MATOP_CONJUGATE=102,
1540                MATOP_SET_SIZES=103,
1541                MATOP_SET_VALUES_ROW=104,
1542                MATOP_REAL_PART=105,
1543                MATOP_IMAG_PART=106,
1544                MATOP_GET_ROW_UTRIANGULAR=107,
1545                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1546                MATOP_MATSOLVE=109,
1547                MATOP_GET_REDUNDANTMATRIX=110,
1548                MATOP_GET_ROW_MIN=111,
1549                MATOP_GET_COLUMN_VEC=112,
1550                MATOP_MISSING_DIAGONAL=113,
1551                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1552                MATOP_CREATE=115,
1553                MATOP_GET_GHOSTS=116,
1554                MATOP_GET_LOCALSUBMATRIX=117,
1555                MATOP_RESTORE_LOCALSUBMATRIX=118,
1556                MATOP_MULT_DIAGONAL_BLOCK=119,
1557                MATOP_HERMITIANTRANSPOSE=120,
1558                MATOP_MULTHERMITIANTRANSPOSE=121,
1559                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1560                MATOP_GETMULTIPROCBLOCK=123,
1561 	       MATOP_GET_SUBMATRICES_PARALLEL=128
1562              } MatOperation;
1563 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1564 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1565 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1566 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1567 
1568 /*
1569    Codes for matrices stored on disk. By default they are
1570    stored in a universal format. By changing the format with
1571    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1572    be stored in a way natural for the matrix, for example dense matrices
1573    would be stored as dense. Matrices stored this way may only be
1574    read into matrices of the same type.
1575 */
1576 #define MATRIX_BINARY_FORMAT_DENSE -1
1577 
1578 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1579 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1580 
1581 /*S
1582      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1584 
1585    Level: advanced
1586 
1587   Concepts: matrix; linear operator, null space
1588 
1589   Users manual sections:
1590 .   sec_singular
1591 
1592 .seealso:  MatNullSpaceCreate()
1593 S*/
typedef struct _p_MatNullSpace* MatNullSpace;

/* ---- Null space creation and application ---- */
extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace);
extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);    /* projects the null space out of the vector */
extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);  /* checks that the claimed null space really annihilates the matrix */

/* ---- SBAIJ-specific helpers ---- */
extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);

/* ---- MAIJ matrices (used for interpolation with multiple degrees of freedom per node) ---- */
extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);

extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);   /* forms the dense/explicit matrix of an operator (expensive; debugging) */

extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);

/* ---- Matrix-free finite-difference (MFFD) Jacobian-vector products ---- */
extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
extern PetscErrorCode  MatMFFDResetHHistory(Mat);
extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);         /* the most recently used differencing parameter h */
extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode  MatMFFDSetFromOptions(Mat);
extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1631 
1632 /*S
1633     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1634               Jacobian vector products
1635 
1636     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1637 
1638            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1639 
1640     Level: developer
1641 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1643 S*/
typedef struct _p_MatMFFD* MatMFFD;

/*E
    MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function

   Level: beginner

.seealso: MatMFFDSetType(), MatMFFDRegister()
E*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"    /* Dennis-Schnabel style h selection -- NOTE(review): confirm naming */
#define MATMFFD_WP  "wp"    /* Walker-Pernice style h selection -- NOTE(review): confirm naming */

extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1659 
1660 /*MC
1661    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1662 
1663    Synopsis:
1664    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1665 
1666    Not Collective
1667 
1668    Input Parameters:
1669 +  name_solver - name of a new user-defined compute-h module
1670 .  path - path (either absolute or relative) the library containing this solver
1671 .  name_create - name of routine to create method context
1672 -  routine_create - routine to create method context
1673 
1674    Level: developer
1675 
1676    Notes:
1677    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1678 
1679    If dynamic libraries are used, then the fourth input argument (routine_create)
1680    is ignored.
1681 
1682    Sample usage:
1683 .vb
1684    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1685                "MyHCreate",MyHCreate);
1686 .ve
1687 
1688    Then, your solver can be chosen with the procedural interface via
1689 $     MatMFFDSetType(mfctx,"my_h")
1690    or at runtime via the option
1691 $     -snes_mf_type my_h
1692 
1693 .keywords: MatMFFD, register
1694 
1695 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1696 M*/
/* With dynamic libraries the creation-function pointer (argument d) is dropped and the
   routine is looked up by its string name (argument c) at runtime instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
extern PetscErrorCode  MatMFFDRegisterDestroy(void);
extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);            /* DS method: minimum allowed u value in h computation */
extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );   /* WP method: whether ||u|| is recomputed every application */


/* Export matrices to a Mathematica viewer (dense and CSR layouts) */
extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1711 
1712 /*
1713    PETSc interface to MUMPS
1714 */
1715 #ifdef PETSC_HAVE_MUMPS
1716 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1717 #endif
1718 
1719 /*
1720    PETSc interface to SUPERLU
1721 */
1722 #ifdef PETSC_HAVE_SUPERLU
1723 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1724 #endif
1725 
1726 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1727 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1728 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1729 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1730 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1731 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1732 
1733 PETSC_EXTERN_CXX_END
1734 #endif
1735