xref: /petsc/include/petscmat.h (revision fcb1c9af1a39adbb75e1011fa083d9d197867fec)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;  /* opaque handle; struct _p_Mat is defined in PETSc's private implementation headers, not here */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ           "seqmaij"
33 #define MATMPIMAIJ           "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ            "seqaij"
37 #define MATMPIAIJ            "mpiaij"
38 #define MATAIJCRL              "aijcrl"
39 #define MATSEQAIJCRL             "seqaijcrl"
40 #define MATMPIAIJCRL             "mpiaijcrl"
41 #define MATAIJCUSP             "aijcusp"
42 #define MATSEQAIJCUSP            "seqaijcusp"
43 #define MATMPIAIJCUSP            "mpiaijcusp"
44 #define MATAIJPERM             "aijperm"
45 #define MATSEQAIJPERM            "seqaijperm"
46 #define MATMPIAIJPERM            "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE          "seqdense"
50 #define MATMPIDENSE          "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ           "seqbaij"
53 #define MATMPIBAIJ           "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ          "seqsbaij"
57 #define MATMPISBAIJ          "mpisbaij"
58 
59 #define MATSEQBSTRM        "seqbstrm"
60 #define MATMPIBSTRM        "mpibstrm"
61 #define MATBSTRM           "bstrm"
62 
63 #define MATDAAD            "daad"
64 #define MATMFFD            "mffd"
65 #define MATNORMAL          "normal"
66 #define MATLRC             "lrc"
67 #define MATSCATTER         "scatter"
68 #define MATBLOCKMAT        "blockmat"
69 #define MATCOMPOSITE       "composite"
70 #define MATFFT             "fft"
71 #define MATFFTW              "fftw"
72 #define MATSEQCUFFT          "seqcufft"
73 #define MATTRANSPOSEMAT    "transpose"
74 #define MATSCHURCOMPLEMENT "schurcomplement"
75 #define MATPYTHON          "python"
76 #define MATHYPRESTRUCT     "hyprestruct"
77 #define MATHYPRESSTRUCT    "hypresstruct"
78 #define MATSUBMATRIX       "submatrix"
79 #define MATLOCALREF        "localref"
80 #define MATNEST            "nest"
81 
82 /*E
83     MatSolverPackage - String with the name of a PETSc matrix solver type.
84 
85     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
86        SuperLU or SuperLU_Dist etc.
87 
88 
89    Level: beginner
90 
91 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
92 E*/
93 #define MatSolverPackage char*
94 #define MATSOLVERSPOOLES      "spooles"
95 #define MATSOLVERSUPERLU      "superlu"
96 #define MATSOLVERSUPERLU_DIST "superlu_dist"
97 #define MATSOLVERUMFPACK      "umfpack"
98 #define MATSOLVERCHOLMOD      "cholmod"
99 #define MATSOLVERESSL         "essl"
100 #define MATSOLVERLUSOL        "lusol"
101 #define MATSOLVERMUMPS        "mumps"
102 #define MATSOLVERPASTIX       "pastix"
103 #define MATSOLVERDSCPACK      "dscpack"
104 #define MATSOLVERMATLAB       "matlab"
105 #define MATSOLVERPETSC        "petsc"
106 #define MATSOLVERPLAPACK      "plapack"
107 #define MATSOLVERBAS          "bas"
108 
109 #define MAT_SOLVER_BSTRM       "bstrm"
110 
111 /*E
112     MatFactorType - indicates what type of factorization is requested
113 
114     Level: beginner
115 
116    Any additions/changes here MUST also be made in include/finclude/petscmat.h
117 
118 .seealso: MatSolverPackage, MatGetFactor()
119 E*/
120 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;  /* NONE = not a factored matrix; LU/CHOLESKY = complete factorizations; ILU/ICC/ILUDT = incomplete variants; passed to MatGetFactor() below */
121 extern const char *const MatFactorTypes[];
122 
123 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
124 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
125 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
126 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
127 
128 /* Logging support */
129 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
130 extern PetscClassId  MAT_CLASSID;
131 extern PetscClassId  MAT_FDCOLORING_CLASSID;
132 extern PetscClassId  MAT_PARTITIONING_CLASSID;
133 extern PetscClassId  MAT_NULLSPACE_CLASSID;
134 extern PetscClassId  MATMFFD_CLASSID;
135 
136 /*E
137     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
138      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate
139      that the input matrix is to be replaced with the converted matrix.
140 
141     Level: beginner
142 
143    Any additions/changes here MUST also be made in include/finclude/petscmat.h
144 
145 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
146 E*/
147 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* INITIAL: create a new matrix; REUSE: store the result in a matrix obtained from a previous call (or, for MatConvert(), replace the input in place); IGNORE: do not produce the matrix at all */
148 
149 /*E
150     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
151      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
152 
153     Level: beginner
154 
155 .seealso: MatGetSeqNonzerostructure()
156 E*/
157 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether an extracted (sub)matrix carries the numerical values or only the nonzero structure; per the note above, currently consumed only by MatGetSeqNonzerostructure() */
158 
159 extern PetscErrorCode  MatInitializePackage(const char[]);
160 
161 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
162 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
163 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
164 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
165 extern PetscErrorCode  MatSetType(Mat,const MatType);
166 extern PetscErrorCode  MatSetFromOptions(Mat);
167 extern PetscErrorCode  MatSetUpPreallocation(Mat);
168 extern PetscErrorCode  MatRegisterAll(const char[]);
169 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
170 
171 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
172 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
173 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
174 
175 /*MC
176    MatRegisterDynamic - Adds a new matrix type
177 
178    Synopsis:
179    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
180 
181    Not Collective
182 
183    Input Parameters:
184 +  name - name of a new user-defined matrix type
184 .  path - path (either absolute or relative) to the library containing this solver
186 .  name_create - name of routine to create method context
187 -  routine_create - routine to create method context
188 
189    Notes:
190    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
191 
192    If dynamic libraries are used, then the fourth input argument (routine_create)
193    is ignored.
194 
195    Sample usage:
196 .vb
197    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
198                "MyMatCreate",MyMatCreate);
199 .ve
200 
201    Then, your solver can be chosen with the procedural interface via
202 $     MatSetType(Mat,"my_mat")
203    or at runtime via the option
204 $     -mat_type my_mat
205 
206    Level: advanced
207 
208    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
209          If your function is not being put into a shared library then use MatRegister() instead
210 
211 .keywords: Mat, register
212 
213 .seealso: MatRegisterAll(), MatRegisterDestroy()
214 
215 M*/
216 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
217 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)  /* function pointer dropped: the routine is located by name (c) in the dynamic library at path (b) -- see Notes above */
218 #else
219 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)  /* static build: register the function pointer (d) directly */
220 #endif
221 
222 extern PetscBool  MatRegisterAllCalled;
223 extern PetscFList MatList;
224 extern PetscFList MatColoringList;
225 extern PetscFList MatPartitioningList;
226 
227 /*E
228     MatStructure - Indicates if the matrix has the same nonzero structure
229 
230     Level: beginner
231 
232    Any additions/changes here MUST also be made in include/finclude/petscmat.h
233 
234 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
235 E*/
236 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;  /* relationship between the nonzero patterns of two matrices, used by MatCopy() and the KSP/PC SetOperators calls (see .seealso above); NOTE(review): exact SAME_PRECONDITIONER semantics are defined by KSPSetOperators(), not visible in this header */
237 
238 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
239 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
240 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
241 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
242 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
243 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
245 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
246 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
247 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
248 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
249 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
252 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
253 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
255 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
258 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
259 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
261 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
262 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
263 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
264 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
265 
266 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
267 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
268 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
269 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
271 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
272 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
273 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
274 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
275 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
278 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
279 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
281 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
284 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
285 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
287 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
288 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
289 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
290 
291 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
292 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
293 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
294 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
295 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
296 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
297 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
298 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
299 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
300 
301 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
302 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
305 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
306 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
308 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
311 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
312 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
314 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
315 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
316 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
317 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
318 
319 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
320 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
321 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
322 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
323 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
324 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
325 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
326 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
327 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
328 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
329 
330 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
331 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
332 
333 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
334 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
335 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
336 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
337 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
338 extern PetscErrorCode  MatCompositeMerge(Mat);
339 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
340 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;  /* how a MATCOMPOSITE applies its member matrices: as a sum (additive) or as a product (multiplicative); set with MatCompositeSetType() below */
341 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
342 
343 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
344 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
345 
346 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
347 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
348 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
349 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
350 
351 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
352 
353 extern PetscErrorCode  MatSetUp(Mat);
354 extern PetscErrorCode  MatDestroy(Mat*);
355 
356 extern PetscErrorCode  MatConjugate(Mat);
357 extern PetscErrorCode  MatRealPart(Mat);
358 extern PetscErrorCode  MatImaginaryPart(Mat);
359 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
360 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
361 
362 /* ------------------------------------------------------------*/
363 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
364 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
365 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
366 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
367 
368 /*S
369      MatStencil - Data structure (C struct) for storing information about a single row or
370         column of a matrix as index on an associated grid.
371 
372    Level: beginner
373 
374   Concepts: matrix; linear operator
375 
376 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
377 S*/
378 typedef struct {
379   PetscInt k,j,i,c;  /* grid index of the row/column: i,j,k are the first/second/third grid dimensions and c the component (degree of freedom) at that point -- NOTE(review): ordering inferred from field names; confirm against MatSetValuesStencil() usage */
380 } MatStencil;
381 
382 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
383 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
384 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
385 
386 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
387 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
388 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
389 
390 /*E
391     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
392      to continue to add values to it
393 
394     Level: beginner
395 
396 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
397 E*/
398 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;  /* FINAL: matrix is now ready for use; FLUSH: more values will still be added (see doc above); NOTE(review): the explicit values (FLUSH=1, FINAL=0) appear to be part of an external interface -- do not renumber */
399 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
400 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
401 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
402 
403 
404 
405 /*E
406     MatOption - Options that may be set for a matrix and its behavior or storage
407 
408     Level: beginner
409 
410    Any additions/changes here MUST also be made in include/finclude/petscmat.h
411 
412 .seealso: MatSetOption()
413 E*/
414 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,  /* per-matrix behavior/storage toggles, set via MatSetOption(Mat,MatOption,PetscBool) */
415               MAT_SYMMETRIC,
416               MAT_STRUCTURALLY_SYMMETRIC,
417               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
418               MAT_NEW_NONZERO_LOCATION_ERR,
419               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
420               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
421               MAT_USE_INODES,
422               MAT_HERMITIAN,
423               MAT_SYMMETRY_ETERNAL,
424               MAT_CHECK_COMPRESSED_ROW,
425               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
426               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
427               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
428               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS is a count sentinel -- keep it last when adding options, and mirror any change in include/finclude/petscmat.h per the note above */
429 extern const char *MatOptions[];
430 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
431 extern PetscErrorCode  MatGetType(Mat,const MatType*);
432 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
433 
434 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
435 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
436 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
437 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
438 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
439 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
440 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
441 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
442 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
443 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
444 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
445 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
446 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
447 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
448 
449 
450 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
451 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
452 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
453 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
454 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
455 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
456 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
457 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
458 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
459 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
460 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
461 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
462 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
463 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
464 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
465 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
466 
467 /*E
468     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
469   its numerical values copied over or just its nonzero structure.
470 
471     Level: beginner
472 
473    Any additions/changes here MUST also be made in include/finclude/petscmat.h
474 
475 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
476 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
477 $                               have several matrices with the same nonzero pattern.
478 
479 .seealso: MatDuplicate()
480 E*/
481 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* duplicate structure only, structure plus values, or share the original's i/j arrays (which also implies values are not copied -- see note above) */
482 
483 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
484 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
485 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
486 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
487 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
488 
489 
490 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
491 extern PetscErrorCode  MatView(Mat,PetscViewer);
492 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
493 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
494 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
495 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
496 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
497 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
498 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
499 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
500 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
501 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
502 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
503 
504 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
505 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
506 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
507 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
508 
/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
/* Every field is PetscLogDouble, so the struct lays out as a contiguous array of
   doubles -- this is what allows the Fortran double precision array view above. */
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
529 
/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_LOCAL      - values for this process's local part of the matrix only
$   MAT_GLOBAL_MAX - for each entry, the maximum over all the local parts
$   MAT_GLOBAL_SUM - for each entry, the sum over all the local parts

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
541 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
542 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
543 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
544 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
545 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
546 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
547 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
548 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
549 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
550 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
551 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
552 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
553 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
554 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
555 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
556 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
557 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
558 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
559 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
560 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
561 
562 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
563 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
564 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
565 extern PetscErrorCode  MatZeroEntries(Mat);
566 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
567 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
568 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
569 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
570 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
571 
572 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
573 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
574 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
575 
576 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
577 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
578 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
579 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
580 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
581 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
582 
583 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
584 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
585 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
586 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
587 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
588 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
589 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
590 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
591 
592 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
593 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
594 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
595 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
596 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
597 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
598 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
599 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
600 #if defined (PETSC_USE_CTABLE)
601 #include "petscctable.h"
602 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
603 #else
604 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
605 #endif
606 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
607 
608 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
609 
610 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
611 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
612 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
613 
614 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
615 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
616 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
617 
618 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
619 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
620 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
621 
622 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
623 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
624 
625 extern PetscErrorCode  MatScale(Mat,PetscScalar);
626 extern PetscErrorCode  MatShift(Mat,PetscScalar);
627 
628 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
629 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
630 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
631 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
632 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
633 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
634 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
635 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
636 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
637 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
638 
639 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
640 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
641 
642 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
643 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
644 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
645 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
646 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
647 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
648 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
649 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
650 
651 /*MC
652    MatSetValue - Set a single entry into a matrix.
653 
654    Not collective
655 
656    Input Parameters:
657 +  m - the matrix
658 .  row - the row location of the entry
659 .  col - the column location of the entry
660 .  value - the value to insert
661 -  mode - either INSERT_VALUES or ADD_VALUES
662 
663    Notes:
664    For efficiency one should use MatSetValues() and set several or many
665    values simultaneously if possible.
666 
667    Level: beginner
668 
669 .seealso: MatSetValues(), MatSetValueLocal()
670 M*/
671 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
672 
673 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
674 
675 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
676 
/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   The rows[] and cols[] arrays are overwritten in place with their global equivalents.

   This is a MACRO; it relies on the _4_ierr and preallocation variables declared by MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   The same mapping is applied to rows and columns; both arrays are overwritten
   in place with their global equivalents.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
/*MC
   MatPreallocateSet - Indicates the locations (row and columns) in the matrix where nonzeros will be
       inserted, using the global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row (must lie in this process's ownership range)
.  ncols - the number of columns indicated
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
/*MC
   MatPreallocateSymmetricSet - Indicates the locations (row and columns) in the matrix where nonzeros will be
       inserted, using the global numbering of the rows and columns; only columns on or
       above the diagonal (cols[i] >= row) are counted, matching symmetric (SBAIJ) storage

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] >= __end) onz[row - __rstart]++; \
    else if ((cols)[__i] >= row) dnz[row - __rstart]++;\
  }\
}
/*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.

   NOTE(review): unlike the other MatPreallocate macros (which use _4_ierr), this
   macro uses the caller's ierr variable -- confirm this is intentional.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
962 
963 /*MC
964    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
965        row in a matrix providing the data that one can use to correctly preallocate the matrix.
966 
967    Synopsis:
968    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
969 
970    Collective on MPI_Comm
971 
972    Input Parameters:
973 +  dnz - the array that was be passed to the matrix preallocation routines
974 -  ozn - the other array passed to the matrix preallocation routines
975 
976 
977    Level: intermediate
978 
979    Notes:
980     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details.
981 
982    Do not malloc or free dnz and onz that is handled internally by these routines
983 
984    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
985 
986   Concepts: preallocation^Matrix
987 
988 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
989           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
990 M*/
991 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
992 
993 
994 
995 /* Routines unique to particular data structures */
996 extern PetscErrorCode  MatShellGetContext(Mat,void **);
997 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
998 
999 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1000 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1001 
1002 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1003 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1004 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1005 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1006 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1007 
1008 #define MAT_SKIP_ALLOCATION -4
1009 
1010 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1011 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1012 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1013 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1014 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1015 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1016 
1017 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1018 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1019 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1020 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1021 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1022 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1023 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1024 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1025 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1026 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1027 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1028 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1029 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1030 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1031 
1032 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1033 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1034 
1035 extern PetscErrorCode  MatStoreValues(Mat);
1036 extern PetscErrorCode  MatRetrieveValues(Mat);
1037 
1038 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1039 
1040 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1041 /*
1042   These routines are not usually accessed directly, rather solving is
1043   done through the KSP and PC interfaces.
1044 */
1045 
1046 /*E
1047     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1048        with an optional dynamic library name, for example
1049        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1050 
1051    Level: beginner
1052 
1053    Cannot use const because the PC objects manipulate the string
1054 
1055 .seealso: MatGetOrdering()
1056 E*/
1057 #define MatOrderingType char*
1058 #define MATORDERINGNATURAL     "natural"
1059 #define MATORDERINGND          "nd"
1060 #define MATORDERING1WD         "1wd"
1061 #define MATORDERINGRCM         "rcm"
1062 #define MATORDERINGQMD         "qmd"
1063 #define MATORDERINGROWLENGTH   "rowlength"
1064 #define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
1065 #define MATORDERINGDSC_MMD     "dsc_mmd"
1066 #define MATORDERINGDSC_MDF     "dsc_mdf"
1067 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1068 
1069 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1070 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1071 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1072 
/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,const MatOrderingType,IS*,IS*))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type,a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order","/home/username/my_lib/lib/libO/solaris/mylib.a",
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1115 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1116 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1117 extern PetscBool  MatOrderingRegisterAllCalled;
1118 extern PetscFList MatOrderingList;
1119 
1120 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1121 
/*E
    MatFactorShiftType - Type of numeric shift added to a matrix factorization
      to prevent zero pivots (see the shifttype/shiftamount fields of MatFactorInfo).

   Level: beginner

.seealso: MatFactorInfo, MatFactorInfoInitialize()
E*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];
1130 
1131 /*S
1132    MatFactorInfo - Data passed into the matrix factorization routines
1133 
1134    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1135 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1136 
1137    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1138 
1139       You can use MatFactorInfoInitialize() to set default values.
1140 
1141    Level: developer
1142 
1143 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1144           MatFactorInfoInitialize()
1145 
1146 S*/
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero enables the drop-tolerance (dt) based variant -- TODO confirm against the factorization routines */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (cast from MatFactorShiftType) */
  PetscReal     shiftamount;     /* how large the shift is */
} MatFactorInfo;
1161 
1162 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1163 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1164 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1165 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1166 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1167 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1168 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1169 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1170 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1171 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1172 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1173 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1174 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1175 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1176 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1177 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1178 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1179 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1180 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1181 
1182 extern PetscErrorCode  MatSetUnfactored(Mat);
1183 
1184 /*E
1185     MatSORType - What type of (S)SOR to perform
1186 
1187     Level: beginner
1188 
1189    May be bitwise ORd together
1190 
1191    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1192 
1193    MatSORType may be bitwise ORd together, so do not change the numbers
1194 
1195 .seealso: MatSOR()
1196 E*/
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,   /* SYMMETRIC == FORWARD|BACKWARD */
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,           /* LOCAL_SYMMETRIC == LOCAL_FORWARD|LOCAL_BACKWARD */
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* values are bit flags and may be ORd together */
1201 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1202 
1203 /*
1204     These routines are for efficiently computing Jacobians via finite differences.
1205 */
1206 
1207 /*E
1208     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1209        with an optional dynamic library name, for example
1210        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1211 
1212    Level: beginner
1213 
1214 .seealso: MatGetColoring()
1215 E*/
1216 #define MatColoringType char*
1217 #define MATCOLORINGNATURAL "natural"
1218 #define MATCOLORINGSL      "sl"
1219 #define MATCOLORINGLF      "lf"
1220 #define MATCOLORINGID      "id"
1221 
1222 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1223 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1224 
1225 /*MC
1226    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1227                                matrix package.
1228 
1229    Synopsis:
1230    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1231 
1232    Not Collective
1233 
1234    Input Parameters:
1235 +  sname - name of Coloring (for example MATCOLORINGSL)
1236 .  path - location of library where creation routine is
1237 .  name - name of function that creates the Coloring type, a string
1238 -  function - function pointer that creates the coloring
1239 
1240    Level: developer
1241 
1242    If dynamic libraries are used, then the fourth input argument (function)
1243    is ignored.
1244 
1245    Sample usage:
1246 .vb
1247    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1248                "MyColor",MyColor);
1249 .ve
1250 
   Then, your coloring can be chosen with the procedural interface via
1252 $     MatColoringSetType(part,"my_color")
1253    or at runtime via the option
1254 $     -mat_coloring_type my_color
1255 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1257 
1258 .keywords: matrix, Coloring, register
1259 
1260 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1261 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is located by name at load time,
   so the function-pointer argument (d) is discarded and 0 is registered instead */
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1267 
1268 extern PetscBool  MatColoringRegisterAllCalled;
1269 
1270 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1271 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1272 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1273 
1274 /*S
1275      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1276         and coloring
1277 
1278    Level: beginner
1279 
1280   Concepts: coloring, sparse Jacobian, finite differences
1281 
1282 .seealso:  MatFDColoringCreate()
1283 S*/
1284 typedef struct _p_MatFDColoring* MatFDColoring;
1285 
1286 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1287 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1288 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1289 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1290 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1291 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1292 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1293 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1294 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1295 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1296 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1297 /*
1298     These routines are for partitioning matrices: currently used only
1299   for adjacency matrix, MatCreateMPIAdj().
1300 */
1301 
1302 /*S
1303      MatPartitioning - Object for managing the partitioning of a matrix or graph
1304 
1305    Level: beginner
1306 
1307   Concepts: partitioning
1308 
1309 .seealso:  MatPartitioningCreate(), MatPartitioningType
1310 S*/
1311 typedef struct _p_MatPartitioning* MatPartitioning;
1312 
1313 /*E
1314     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1315        with an optional dynamic library name, for example
1316        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1317 
1318    Level: beginner
1319 
1320 .seealso: MatPartitioningCreate(), MatPartitioning
1321 E*/
1322 #define MatPartitioningType char*
1323 #define MATPARTITIONINGCURRENT  "current"
1324 #define MATPARTITIONINGSQUARE   "square"
1325 #define MATPARTITIONINGPARMETIS "parmetis"
1326 #define MATPARTITIONINGCHACO    "chaco"
1327 #define MATPARTITIONINGJOSTLE   "jostle"
1328 #define MATPARTITIONINGPARTY    "party"
1329 #define MATPARTITIONINGSCOTCH   "scotch"
1330 
1331 
1332 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1333 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1334 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1335 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1336 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1337 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1338 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1339 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1340 
1341 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1342 
1343 /*MC
1344    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1345    matrix package.
1346 
1347    Synopsis:
1348    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1349 
1350    Not Collective
1351 
1352    Input Parameters:
1353 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1354 .  path - location of library where creation routine is
1355 .  name - name of function that creates the partitioning type, a string
1356 -  function - function pointer that creates the partitioning type
1357 
1358    Level: developer
1359 
1360    If dynamic libraries are used, then the fourth input argument (function)
1361    is ignored.
1362 
1363    Sample usage:
1364 .vb
1365    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1366                "MyPartCreate",MyPartCreate);
1367 .ve
1368 
1369    Then, your partitioner can be chosen with the procedural interface via
1370 $     MatPartitioningSetType(part,"my_part")
1371    or at runtime via the option
1372 $     -mat_partitioning_type my_part
1373 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1375 
1376 .keywords: matrix, partitioning, register
1377 
1378 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1379 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is located by name at load time,
   so the function-pointer argument (d) is discarded and 0 is registered instead */
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1385 
1386 extern PetscBool  MatPartitioningRegisterAllCalled;
1387 
1388 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1389 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1390 
1391 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1392 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1393 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1394 
1395 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1396 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1397 
1398 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1399 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1400 
/* Global (coarse) partitioning strategies for the Chaco package (MATPARTITIONINGCHACO) */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
/* Local refinement strategies for Chaco */
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
/* Eigensolver selection for Chaco -- presumably used by its spectral method; confirm in Chaco docs */
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1410 
/* Method-name strings accepted by MatPartitioningPartySetGlobal() for the Party package (MATPARTITIONINGPARTY) */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
/* Method-name strings accepted by MatPartitioningPartySetLocal() */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1427 
/* Global partitioning strategies for the Scotch package (MATPARTITIONINGSCOTCH) */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
/* Local refinement strategies for Scotch */
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1438 
1439 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1440 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1441 
1442 /*
1443     If you add entries here you must also add them to finclude/petscmat.h
1444 */
/* Identifiers for the individual matrix operations; used with MatHasOperation(),
   MatShellSetOperation() and MatShellGetOperation() below */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
	       MATOP_GET_SUBMATRICES_PARALLEL=128 /* NOTE(review): jumps from 123 to 128; values 124-127 appear unassigned -- confirm before reusing them */
             } MatOperation;
1571 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1572 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1573 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1574 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1575 
1576 /*
1577    Codes for matrices stored on disk. By default they are
1578    stored in a universal format. By changing the format with
1579    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1580    be stored in a way natural for the matrix, for example dense matrices
1581    would be stored as dense. Matrices stored this way may only be
1582    read into matrices of the same type.
1583 */
1584 #define MATRIX_BINARY_FORMAT_DENSE -1
1585 
1586 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1587 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1588 
1589 /*S
1590      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1592 
1593    Level: advanced
1594 
1595   Concepts: matrix; linear operator, null space
1596 
1597   Users manual sections:
1598 .   sec_singular
1599 
1600 .seealso:  MatNullSpaceCreate()
1601 S*/
1602 typedef struct _p_MatNullSpace* MatNullSpace;
1603 
1604 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1605 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1606 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1607 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1608 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1609 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1610 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1611 
1612 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1613 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1614 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1615 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1616 
1617 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1618 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1619 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1620 
1621 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1622 
1623 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1624 
1625 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1626 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1627 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1628 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1629 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1630 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1631 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1632 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1633 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1634 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1635 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1636 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1637 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1638 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1639 
1640 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1642               Jacobian vector products
1643 
1644     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1645 
1646            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1647 
1648     Level: developer
1649 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1651 S*/
1652 typedef struct _p_MatMFFD* MatMFFD;
1653 
1654 /*E
1655     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1656 
1657    Level: beginner
1658 
1659 .seealso: MatMFFDSetType(), MatMFFDRegister()
1660 E*/
1661 #define MatMFFDType char*
1662 #define MATMFFD_DS  "ds"
1663 #define MATMFFD_WP  "wp"
1664 
1665 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1666 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1667 
1668 /*MC
1669    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1670 
1671    Synopsis:
1672    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1673 
1674    Not Collective
1675 
1676    Input Parameters:
1677 +  name_solver - name of a new user-defined compute-h module
1678 .  path - path (either absolute or relative) the library containing this solver
1679 .  name_create - name of routine to create method context
1680 -  routine_create - routine to create method context
1681 
1682    Level: developer
1683 
1684    Notes:
1685    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1686 
1687    If dynamic libraries are used, then the fourth input argument (routine_create)
1688    is ignored.
1689 
1690    Sample usage:
1691 .vb
1692    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1693                "MyHCreate",MyHCreate);
1694 .ve
1695 
1696    Then, your solver can be chosen with the procedural interface via
1697 $     MatMFFDSetType(mfctx,"my_h")
1698    or at runtime via the option
1699 $     -snes_mf_type my_h
1700 
1701 .keywords: MatMFFD, register
1702 
1703 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1704 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is located by name at load time,
   so the function-pointer argument (d) is discarded and 0 is registered instead */
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif
1710 
1711 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1712 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1713 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1714 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1715 
1716 
1717 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1718 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1719 
1720 /*
1721    PETSc interface to MUMPS
1722 */
1723 #ifdef PETSC_HAVE_MUMPS
1724 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1725 #endif
1726 
1727 /*
1728    PETSc interface to SUPERLU
1729 */
1730 #ifdef PETSC_HAVE_SUPERLU
1731 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1732 #endif
1733 
1734 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1735 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1736 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1737 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1738 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1739 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1740 
1741 PETSC_EXTERN_CXX_END
1742 #endif
1743