xref: /petsc/include/petscmat.h (revision 275089eca2c8bd5bfe5c75ceb215ae024be1bedc)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
/* Built-in matrix type names.  Most formats come as a triple: a generic alias
   (e.g. MATAIJ) plus explicit sequential (MATSEQ*) and parallel (MATMPI*)
   variants.  NOTE(review): the alias presumably resolves to the seq/mpi
   variant based on the communicator size -- confirm in MatSetType(). */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ           "seqmaij"
33 #define MATMPIMAIJ           "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ            "seqaij"
37 #define MATMPIAIJ            "mpiaij"
38 #define MATAIJCRL              "aijcrl"
39 #define MATSEQAIJCRL             "seqaijcrl"
40 #define MATMPIAIJCRL             "mpiaijcrl"
41 #define MATAIJCUDA             "aijcuda"
42 #define MATSEQAIJCUDA            "seqaijcuda"
43 #define MATMPIAIJCUDA            "mpiaijcuda"
44 #define MATAIJPERM             "aijperm"
45 #define MATSEQAIJPERM            "seqaijperm"
46 #define MATMPIAIJPERM            "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE          "seqdense"
50 #define MATMPIDENSE          "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ           "seqbaij"
53 #define MATMPIBAIJ           "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ          "seqsbaij"
57 #define MATMPISBAIJ          "mpisbaij"
58 #define MATDAAD            "daad"
59 #define MATMFFD            "mffd"
60 #define MATNORMAL          "normal"
61 #define MATLRC             "lrc"
62 #define MATSCATTER         "scatter"
63 #define MATBLOCKMAT        "blockmat"
64 #define MATCOMPOSITE       "composite"
65 #define MATSEQFFTW         "seqfftw"
66 #define MATSEQCUFFT        "seqcufft"
67 #define MATTRANSPOSEMAT    "transpose"
68 #define MATSCHURCOMPLEMENT "schurcomplement"
69 #define MATPYTHON          "python"
70 #define MATHYPRESTRUCT     "hyprestruct"
71 #define MATHYPRESSTRUCT    "hypresstruct"
72 #define MATSUBMATRIX       "submatrix"
73 #define MATDD              "matdd"
74 #define MATIM              "matim"
75 #define MATLOCALREF        "localref"
76 #define MATNEST            "nest"
77 
78 /*E
79     MatSolverPackage - String with the name of a PETSc matrix solver type.
80 
81     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
82        SuperLU or SuperLU_Dist etc.
83 
84 
85    Level: beginner
86 
87 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
88 E*/
89 #define MatSolverPackage char*
/* Registered factorization package names: "petsc" selects what PETSc itself
   provides; the other names refer to external solver packages (presumably
   usable only when PETSc was configured with them -- see MatGetFactor()). */
90 #define MATSOLVERSPOOLES      "spooles"
91 #define MATSOLVERSUPERLU      "superlu"
92 #define MATSOLVERSUPERLU_DIST "superlu_dist"
93 #define MATSOLVERUMFPACK      "umfpack"
94 #define MATSOLVERCHOLMOD      "cholmod"
95 #define MATSOLVERESSL         "essl"
96 #define MATSOLVERLUSOL        "lusol"
97 #define MATSOLVERMUMPS        "mumps"
98 #define MATSOLVERPASTIX       "pastix"
99 #define MATSOLVERDSCPACK      "dscpack"
100 #define MATSOLVERMATLAB       "matlab"
101 #define MATSOLVERPETSC        "petsc"
102 #define MATSOLVERPLAPACK      "plapack"
103 #define MATSOLVERBAS          "bas"
104 
105 /*E
106     MatFactorType - indicates what type of factorization is requested
107 
108     Level: beginner
109 
110    Any additions/changes here MUST also be made in include/finclude/petscmat.h
111 
112 .seealso: MatSolverPackage, MatGetFactor()
113 E*/
114 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
115 extern const char *const MatFactorTypes[];   /* printable names for the MatFactorType values */
116 
117 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);                  /* obtain a factor matrix of the given type from the named package */
118 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);  /* query availability instead of erroring */
119 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);                       /* which package produced a factor matrix */
120 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
121 
122 /* Logging support */
123 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
124 extern PetscClassId  MAT_CLASSID;
125 extern PetscClassId  MAT_FDCOLORING_CLASSID;
126 extern PetscClassId  MAT_PARTITIONING_CLASSID;
127 extern PetscClassId  MAT_NULLSPACE_CLASSID;
128 extern PetscClassId  MATMFFD_CLASSID;
129 
130 /*E
131     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
132      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
133      that the input matrix is to be replaced with the converted matrix.
134 
135     Level: beginner
136 
137    Any additions/changes here MUST also be made in include/finclude/petscmat.h
138 
139 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
140 E*/
141 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
142 
143 /*E
144     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
145      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
146 
147     Level: beginner
148 
149 .seealso: MatGetSeqNonzerostructure()
150 E*/
151 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
152 
153 extern PetscErrorCode  MatInitializePackage(const char[]);
154 
155 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
156 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
157 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
158 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
159 extern PetscErrorCode  MatSetType(Mat,const MatType);
160 extern PetscErrorCode  MatSetFromOptions(Mat);
161 extern PetscErrorCode  MatSetUpPreallocation(Mat);
162 extern PetscErrorCode  MatRegisterAll(const char[]);
163 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
164 
165 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
166 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
167 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
168 
169 /*MC
170    MatRegisterDynamic - Adds a new matrix type
171 
172    Synopsis:
173    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
174 
175    Not Collective
176 
177    Input Parameters:
178 +  name - name of a new user-defined matrix type
179 .  path - path (either absolute or relative) to the library containing this solver
180 .  name_create - name of routine to create method context
181 -  routine_create - routine to create method context
182 
183    Notes:
184    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
185 
186    If dynamic libraries are used, then the fourth input argument (routine_create)
187    is ignored.
188 
189    Sample usage:
190 .vb
191    MatRegisterDynamic("my_mat","/home/username/my_lib/lib/libO/solaris/mylib.a",
192                "MyMatCreate",MyMatCreate);
193 .ve
194 
195    Then, your solver can be chosen with the procedural interface via
196 $     MatSetType(Mat,"my_mat")
197    or at runtime via the option
198 $     -mat_type my_mat
199 
200    Level: advanced
201 
202    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
203          If your function is not being put into a shared library then use MatRegister() instead
204 
205 .keywords: Mat, register
206 
207 .seealso: MatRegisterAll(), MatRegisterDestroy()
208 
209 M*/
210 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
211 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
212 #else
213 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
214 #endif
215 
216 extern PetscBool  MatRegisterAllCalled;
217 extern PetscFList MatList;
218 extern PetscFList MatColoringList;
219 extern PetscFList MatPartitioningList;
220 
221 /*E
222     MatStructure - Indicates if the matrix has the same nonzero structure
223 
224     Level: beginner
225 
226    Any additions/changes here MUST also be made in include/finclude/petscmat.h
227 
228 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
229 E*/
230 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;
231 
232 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
233 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
234 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
235 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
236 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
237 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
238 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
239 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
240 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
241 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
242 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
243 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
244 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
245 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
246 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
247 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
248 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
249 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
250 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
251 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
252 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
253 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
254 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
255 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
256 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
257 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
258 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
259 
260 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
261 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
265 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
266 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
267 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
268 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
269 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
270 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
271 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
272 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
273 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
274 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
275 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
276 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
277 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
278 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
280 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
281 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
282 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
283 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
284 
285 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
286 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
287 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
291 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
292 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
293 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
294 
295 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
296 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
301 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
302 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
303 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
304 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
305 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
307 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
308 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
309 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
310 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
311 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
312 
313 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
314 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
316 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
317 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
318 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
319 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
320 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
321 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
322 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
323 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
324 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
325 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
326 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
327 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
328 extern PetscErrorCode  MatCompositeMerge(Mat);
329 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/*E
    MatCompositeType - How a MATCOMPOSITE matrix combines its member matrices:
      additively (their sum) or multiplicatively (their product).
      NOTE(review): semantics inferred from the enumerator names -- confirm
      against the MatCompositeSetType() implementation.

    Level: advanced

.seealso: MatCreateComposite(), MatCompositeAddMat(), MatCompositeSetType()
E*/
330 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
331 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
332 
333 #if defined PETSC_HAVE_MATDD
/* MATDD interface -- compiled only when PETSc is configured with PETSC_HAVE_MATDD */
334 extern PetscErrorCode  MatDDCreate(Mat A);
335 typedef enum {MATDD_DOMAINS_COLUMN, MATDD_DOMAINS_ROW} MatDDDomainType;
336 extern PetscErrorCode  MatDDSetDomainsLocal(Mat A, MatDDDomainType type, PetscInt domain_count, const PetscInt *supported_domains, const PetscInt *domain_limits, PetscBool covering);
337 extern PetscErrorCode  MatDDSetDomainsLocalIS(Mat A, MatDDDomainType type, IS supported_domains, IS domain_limits, PetscBool covering);
338 extern PetscErrorCode  MatDDSetScatter(Mat A, Mat S);
339 extern PetscErrorCode  MatDDSetGather(Mat A,  Mat G);
340 /**/
/* Negative enumerators are sentinels, kept distinct from the zero default */
341 typedef enum {MATDD_BLOCK_COMM_DEFAULT = 0, MATDD_BLOCK_COMM_SELF = -1, MATDD_BLOCK_COMM_DETERMINE = -2} MatDDBlockCommType;
/* NOTE(review): "Defalt" below is a typo for "Default", but it is part of the public
   API name; renaming it here would break existing callers -- fix only in a coordinated
   change together with all call sites. */
342 extern PetscErrorCode  MatDDGetDefaltBlockType(Mat A, const MatType *type);
343 extern PetscErrorCode  MatDDSetDefaltBlockType(Mat A, const MatType type);
344 extern PetscErrorCode  MatDDAddBlockLocal(Mat A, PetscInt rowblock, PetscInt colblock, const MatType blockmattype,  MatDDBlockCommType blockcommtype, Mat *block);
345 extern PetscErrorCode  MatDDSetBlockLocal(Mat A, PetscInt rowblock, PetscInt colblock, Mat block);
346 extern PetscErrorCode  MatDDGetBlockLocal(Mat A, PetscInt rowblock, PetscInt colblock, Mat *block);
347 /**/
348 extern PetscErrorCode  MatDDAIJSetPreallocation(Mat A,PetscInt nz,PetscInt *nnz);
349 #endif
350 
351 #if defined PETSC_HAVE_MATIM
352 extern PetscErrorCode  MatIMSetIS(Mat A, IS in, IS out);
353 extern PetscErrorCode  MatIMGetIS(Mat A, IS *in, IS *out);
354 #endif
355 
356 
357 
358 extern PetscErrorCode  MatCreateSeqFFTW(MPI_Comm,PetscInt,const PetscInt[],Mat*);
359 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
360 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
361 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
362 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
363 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
364 
365 extern PetscErrorCode  MatCreatePython(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const char[],Mat*);
366 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
367 
368 
369 extern PetscErrorCode  MatSetUp(Mat);
370 extern PetscErrorCode  MatDestroy(Mat);
371 
372 extern PetscErrorCode  MatConjugate(Mat);
373 extern PetscErrorCode  MatRealPart(Mat);
374 extern PetscErrorCode  MatImaginaryPart(Mat);
375 extern PetscErrorCode  MatGetDiagonalBlock(Mat,PetscBool *,MatReuse,Mat*);
376 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
377 
378 /* ------------------------------------------------------------*/
379 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
380 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
381 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
382 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
383 
384 /*S
385      MatStencil - Data structure (C struct) for storing information about a single row or
386         column of a matrix as index on an associated grid.
387 
388    Level: beginner
389 
390   Concepts: matrix; linear operator
391 
392 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
393 S*/
394 typedef struct {
395   PetscInt k,j,i,c;  /* grid indices (k,j,i) and component (degree of freedom) c at that grid point -- NOTE(review): confirm index-ordering convention against MatSetValuesStencil() */
396 } MatStencil;
397 
398 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
399 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
400 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
401 
402 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
403 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
404 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
405 
406 /*E
407     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
408      to continue to add values to it
409 
410     Level: beginner
411 
412 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
413 E*/
414 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* explicit values look intentional (final assembly is the zero value); NOTE(review): presumably mirrored in include/finclude/petscmat.h -- confirm before renumbering */
415 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
416 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
417 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
418 
419 
420 
421 /*E
422     MatOption - Options that may be set for a matrix and its behavior or storage
423 
424     Level: beginner
425 
426    Any additions/changes here MUST also be made in include/finclude/petscmat.h
427 
428 .seealso: MatSetOption()
429 E*/
430 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
431               MAT_SYMMETRIC,
432               MAT_STRUCTURALLY_SYMMETRIC,
433               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
434               MAT_NEW_NONZERO_LOCATION_ERR,
435               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
436               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
437               MAT_USE_INODES,
438               MAT_HERMITIAN,
439               MAT_SYMMETRY_ETERNAL,
440               MAT_CHECK_COMPRESSED_ROW,
441               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
442               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
443               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
444               NUM_MAT_OPTIONS} MatOption;
445 extern const char *MatOptions[];
446 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
447 extern PetscErrorCode  MatGetType(Mat,const MatType*);
448 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
449 
450 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
451 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
452 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
453 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
454 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
455 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
456 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
457 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
458 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
459 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
460 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
461 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
462 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
463 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
464 
465 
466 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
467 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
468 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
469 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
470 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
471 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
472 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
473 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
474 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
475 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
476 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
477 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
478 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
479 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
480 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
481 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
482 
483 /*E
484     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
485   its numerical values copied over or just its nonzero structure.
486 
487     Level: beginner
488 
489    Any additions/changes here MUST also be made in include/finclude/petscmat.h
490 
491 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
492 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
493 $                               have several matrices with the same nonzero pattern.
494 
495 .seealso: MatDuplicate()
496 E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; /* MAT_SHARE_NONZERO_PATTERN also implies the values are not copied (see E-doc above) */
498 
499 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
500 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
501 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
502 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
503 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
504 
505 
506 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
507 extern PetscErrorCode  MatView(Mat,PetscViewer);
508 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
509 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
510 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
511 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
512 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
513 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
514 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
515 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
516 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
517 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
518 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
519 
520 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
521 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
522 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
523 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
524 
525 /*S
526      MatInfo - Context of matrix information, used with MatGetInfo()
527 
528    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
529 
530    Level: intermediate
531 
532   Concepts: matrix^nonzero information
533 
534 .seealso:  MatGetInfo(), MatInfoType
535 S*/
/* Filled in by MatGetInfo(); every field is a PetscLogDouble (floating point),
   presumably so the MAT_GLOBAL_MAX/MAT_GLOBAL_SUM reductions are uniform -- see MatInfoType below. */
typedef struct {
  PetscLogDouble block_size;                         /* block size of the matrix */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: preallocated, actually used, allocated but unused */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: user-supplied estimate vs actually required */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
545 
546 /*E
547     MatInfoType - Indicates if you want information about the local part of the matrix,
548      the entire parallel matrix or the maximum over all the local parts.
549 
550     Level: beginner
551 
552    Any additions/changes here MUST also be made in include/finclude/petscmat.h
553 
554 .seealso: MatGetInfo(), MatInfo
555 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; /* info for the local part, the max over all local parts, or the whole parallel matrix (see E-doc above) */
557 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
558 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
559 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
560 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
561 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
562 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
563 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
564 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
565 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
566 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
567 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
568 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
569 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
570 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
571 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
572 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
573 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
574 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
575 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
576 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
577 
578 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
579 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
580 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
581 extern PetscErrorCode  MatZeroEntries(Mat);
582 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
583 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
584 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
585 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
586 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
587 
588 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
589 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
590 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
591 
592 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
593 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
594 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
595 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
596 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
597 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
598 
599 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
600 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
601 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
602 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
603 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
604 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
605 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
606 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
607 
608 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
609 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
610 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
611 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
612 extern PetscErrorCode  MatDestroy_MPIAIJ_SeqsToMPI(Mat);
613 extern PetscErrorCode  MatGetLocalMat(Mat,MatReuse,Mat*);
614 extern PetscErrorCode  MatGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
615 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
616 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
617 #if defined (PETSC_USE_CTABLE)
618 #include "petscctable.h"
619 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
620 #else
621 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
622 #endif
623 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
624 
625 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
626 
627 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
628 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
629 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
630 
631 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
632 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
633 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
634 
635 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
636 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
637 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
638 
639 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
640 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
641 
642 extern PetscErrorCode  MatScale(Mat,PetscScalar);
643 extern PetscErrorCode  MatShift(Mat,PetscScalar);
644 
645 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
646 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
647 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
648 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
649 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
650 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
651 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
652 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
653 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
654 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
655 
656 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
657 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
658 
659 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
660 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
661 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
662 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
663 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
664 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
665 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
666 
667 /*MC
668    MatSetValue - Set a single entry into a matrix.
669 
670    Not collective
671 
672    Input Parameters:
673 +  m - the matrix
674 .  row - the row location of the entry
675 .  col - the column location of the entry
676 .  value - the value to insert
677 -  mode - either INSERT_VALUES or ADD_VALUES
678 
679    Notes:
680    For efficiency one should use MatSetValues() and set several or many
681    values simultaneously if possible.
682 
683    Level: beginner
684 
685 .seealso: MatSetValues(), MatSetValueLocal()
686 M*/
687 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
688 
689 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
690 
691 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
692 
693 /*MC
694    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
695        row in a matrix providing the data that one can use to correctly preallocate the matrix.
696 
697    Synopsis:
698    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
699 
700    Collective on MPI_Comm
701 
702    Input Parameters:
703 +  comm - the communicator that will share the eventually allocated matrix
704 .  nrows - the number of LOCAL rows in the matrix
705 -  ncols - the number of LOCAL columns in the matrix
706 
707    Output Parameters:
708 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
710 
711 
712    Level: intermediate
713 
714    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
716 
717    Do not malloc or free dnz and onz, that is handled internally by these routines
718 
719    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
720 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
722 
723   Concepts: preallocation^Matrix
724 
725 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
726           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
727 M*/
/* Opens a scope (the deliberately unbalanced '{' is closed by MatPreallocateFinalize())
   that allocates and zeroes the dnz/onz count arrays and uses MPI_Scan prefix sums to
   compute this process's first owned column (__start), one-past-last owned column
   (__end) and first owned row (__rstart).  Those double-underscore variables, plus
   _4_ierr and __nrows, are read by MatPreallocateSet() and related macros, which must
   therefore be expanded inside this same scope. */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
736 
737 /*MC
738    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
739        row in a matrix providing the data that one can use to correctly preallocate the matrix.
740 
741    Synopsis:
742    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
743 
744    Collective on MPI_Comm
745 
746    Input Parameters:
747 +  comm - the communicator that will share the eventually allocated matrix
748 .  nrows - the number of LOCAL rows in the matrix
749 -  ncols - the number of LOCAL columns in the matrix
750 
751    Output Parameters:
752 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
754 
755 
756    Level: intermediate
757 
758    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
760 
761    Do not malloc or free dnz and onz, that is handled internally by these routines
762 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
764 
765   Concepts: preallocation^Matrix
766 
767 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
768           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
769 M*/
/* Symmetric variant of MatPreallocateInitialize(): identical setup except no __start
   is declared or computed, because MatPreallocateSymmetricSet() only tests columns
   against __end (upper-triangular counting).  The unbalanced '{' is closed by
   MatPreallocateFinalize(). */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
778 
779 /*MC
780    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
781        inserted using a local number of the rows and columns
782 
783    Synopsis:
784    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
785 
786    Not Collective
787 
788    Input Parameters:
789 +  map - the row mapping from local numbering to global numbering
790 .  nrows - the number of rows indicated
791 .  rows - the indices of the rows
792 .  cmap - the column mapping from local to global numbering
793 .  ncols - the number of columns in the matrix
794 .  cols - the columns indicated
795 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
797 
798 
799    Level: intermediate
800 
801    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
803 
804    Do not malloc or free dnz and onz, that is handled internally by these routines
805 
806   Concepts: preallocation^Matrix
807 
808 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
809           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
810 M*/
/* Translates the rows and cols arrays IN PLACE from local to global numbering (the
   caller's arrays are overwritten), then counts each (row, cols[]) pair via
   MatPreallocateSet().  Relies on _4_ierr declared by MatPreallocateInitialize(), so
   it must be expanded inside that macro's scope. */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
820 
821 /*MC
822    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
823        inserted using a local number of the rows and columns
824 
825    Synopsis:
826    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
827 
828    Not Collective
829 
830    Input Parameters:
831 +  map - the mapping between local numbering and global numbering
832 .  nrows - the number of rows indicated
833 .  rows - the indices of the rows
834 .  ncols - the number of columns in the matrix
835 .  cols - the columns indicated
836 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
838 
839 
840    Level: intermediate
841 
842    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
844 
845    Do not malloc or free dnz and onz that is handled internally by these routines
846 
847   Concepts: preallocation^Matrix
848 
849 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
850           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
851 M*/
/* Symmetric variant of MatPreallocateSetLocal(): a single mapping is applied to both
   the rows and cols arrays IN PLACE (caller's arrays are overwritten), then each row
   is counted via MatPreallocateSymmetricSet().  Relies on _4_ierr declared by the
   MatPreallocate*Initialize() macros, so it must be expanded inside their scope. */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
861 
862 /*MC
863    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
864        inserted using a local number of the rows and columns
865 
866    Synopsis:
867    PetscErrorCode MatPreallocateSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
868 
869    Not Collective
870 
871    Input Parameters:
872 +  row - the row
873 .  ncols - the number of columns in the matrix
874 -  cols - the columns indicated
875 
876    Output Parameters:
877 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
879 
880 
881    Level: intermediate
882 
883    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
885 
886    Do not malloc or free dnz and onz that is handled internally by these routines
887 
888    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
889 
890   Concepts: preallocation^Matrix
891 
892 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
893           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
894 M*/
/* Counts one row's nonzeros: errors if the (global) row is outside this process's
   ownership range [__rstart, __rstart+__nrows), then for each column increments the
   diagonal-block count dnz when the column lies in the locally owned column range
   [__start, __end), otherwise the off-diagonal count onz.  Uses variables declared by
   MatPreallocateInitialize(), so it must be expanded inside that macro's scope. */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
904 
905 /*MC
906    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
907        inserted using a local number of the rows and columns
908 
909    Synopsis:
910    PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
911 
912    Not Collective
913 
914    Input Parameters:
915 +  nrows - the number of rows indicated
916 .  rows - the indices of the rows
917 .  ncols - the number of columns in the matrix
918 .  cols - the columns indicated
919 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
921 
922 
923    Level: intermediate
924 
925    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
927 
928    Do not malloc or free dnz and onz that is handled internally by these routines
929 
930    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
931 
932   Concepts: preallocation^Matrix
933 
934 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
935           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
936 M*/
/* Symmetric counting for one row: columns at or beyond the local column range
   (cols[i] >= __end) increment onz; columns in the upper triangle (cols[i] >= row)
   increment dnz; columns strictly below the diagonal are ignored (only the upper
   triangle is stored).  Note no row-range validation here, unlike MatPreallocateSet().
   Uses __end and __rstart from MatPreallocateSymmetricInitialize(), so it must be
   expanded inside that macro's scope. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
944 
945 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
947 
948    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
950 
951    Not Collective
952 
953    Input Parameters:
954 .  A - matrix
955 .  row - row where values exist (must be local to this process)
956 .  ncols - number of columns
957 .  cols - columns with nonzeros
958 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
960 
961 
962    Level: intermediate
963 
964    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
966 
967    Do not malloc or free dnz and onz that is handled internally by these routines
968 
969    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
970 
971   Concepts: preallocation^Matrix
972 
973 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
974           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
975 M*/
/* If the matrix A already exists, records the nonzero locations directly with
   MatSetValues() (PETSC_NULL values, INSERT_VALUES); otherwise falls back to counting
   with MatPreallocateSet().  NOTE(review): unlike the other macros in this family it
   uses a plain 'ierr' variable, which the caller must have declared in scope. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
977 
978 
979 /*MC
980    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
981        row in a matrix providing the data that one can use to correctly preallocate the matrix.
982 
983    Synopsis:
984    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
985 
986    Collective on MPI_Comm
987 
988    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
991 
992 
993    Level: intermediate
994 
995    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
997 
998    Do not malloc or free dnz and onz that is handled internally by these routines
999 
1000    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
1001 
1002   Concepts: preallocation^Matrix
1003 
1004 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1005           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1006 M*/
/* Frees the dnz/onz arrays allocated by MatPreallocateInitialize() (as one PetscFree2
   pair) and supplies the closing '}' for the scope that macro opened. */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1008 
1009 
1010 
1011 /* Routines unique to particular data structures */
1012 extern PetscErrorCode  MatShellGetContext(Mat,void **);
1013 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1014 
1015 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1016 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1017 
1018 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1019 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1020 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1021 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1022 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1023 
#define MAT_SKIP_ALLOCATION -4  /* NOTE(review): negative sentinel, apparently passed where a preallocation count is expected to skip allocation -- confirm against the *SetPreallocation() implementations */
1025 
1026 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1027 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1028 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1029 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1030 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1031 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1032 
1033 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1034 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1035 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1036 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1037 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1038 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1039 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1040 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1041 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1042 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1043 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1044 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1045 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1046 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1047 
1048 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1049 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1050 
1051 extern PetscErrorCode  MatStoreValues(Mat);
1052 extern PetscErrorCode  MatRetrieveValues(Mat);
1053 
1054 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1055 
1056 /*
1057   These routines are not usually accessed directly, rather solving is
1058   done through the KSP and PC interfaces.
1059 */
1060 
1061 /*E
1062     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1063        with an optional dynamic library name, for example
1064        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1065 
1066    Level: beginner
1067 
1068    Cannot use const because the PC objects manipulate the string
1069 
1070 .seealso: MatGetOrdering()
1071 E*/
/* Names of the built-in reordering schemes accepted by MatGetOrdering() (declared below). */
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1083 
1084 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1085 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1086 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1087 
1088 /*MC
1089    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1090 
1091    Synopsis:
1092    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1093 
1094    Not Collective
1095 
1096    Input Parameters:
1097 +  sname - name of ordering (for example MATORDERINGND)
1098 .  path - location of library where creation routine is
1099 .  name - name of function that creates the ordering type,a string
1100 -  function - function pointer that creates the ordering
1101 
1102    Level: developer
1103 
1104    If dynamic libraries are used, then the fourth input argument (function)
1105    is ignored.
1106 
1107    Sample usage:
1108 .vb
1109    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1110                "MyOrder",MyOrder);
1111 .ve
1112 
1113    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1115    or at runtime via the option
1116 $     -pc_factor_mat_ordering_type my_order
1117 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1119 
1120 .keywords: matrix, ordering, register
1121 
1122 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1123 M*/
/* With dynamic libraries the creation routine is located by name at load time,
   so the function-pointer argument is dropped; otherwise it is passed through. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif

extern PetscErrorCode  MatOrderingRegisterDestroy(void);
extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
extern PetscBool  MatOrderingRegisterAllCalled;  /* set once MatOrderingRegisterAll() has run */
extern PetscFList MatOrderingList;               /* registry of available orderings */

/* Permutes rows/columns so the diagonal has no entries smaller than the given tolerance */
extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1136 
/*S
    MatFactorShiftType - Numeric Shift.

   Level: beginner

S*/
/* Strategy for shifting values during factorization to prevent zero pivots
   (selected via the shifttype field of MatFactorInfo) */
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];  /* printable names corresponding to the enum values */
1145 
/*S
   MatFactorInfo - Data passed into the matrix factorization routines

   In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
$     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

   Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.

      You can use MatFactorInfoInitialize() to set default values.

   Level: developer

.seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
          MatFactorInfoInitialize()

S*/
/* Every field is PetscReal so the struct maps onto a plain real array in Fortran (see note above) */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* use drop-tolerance (dt*) based factorization -- presumably a boolean flag stored as real; TODO confirm */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value) */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1176 
/* Factorization (in-place, symbolic, and numeric phases) and triangular-solve interfaces */
extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
/* Solves with a previously factored matrix */
extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);

/* Resets a factored matrix so it is again treated as unfactored */
extern PetscErrorCode  MatSetUnfactored(Mat);
1198 
/*E
    MatSORType - What type of (S)SOR to perform

    Level: beginner

   May be bitwise ORd together

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

   MatSORType may be bitwise ORd together, so do not change the numbers

.seealso: MatSOR()
E*/
/* Note the flag encoding: SOR_SYMMETRIC_SWEEP = FORWARD|BACKWARD (1|2 = 3) and
   SOR_LOCAL_SYMMETRIC_SWEEP = LOCAL_FORWARD|LOCAL_BACKWARD (4|8 = 12). */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
/* Applies (S)SOR relaxation sweeps; see the MatSOR() man page for the argument meanings */
extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1217 
/*
    These routines are for efficiently computing Jacobians via finite differences.
*/

/*E
    MatColoringType - String with the name of a PETSc matrix coloring or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

   Level: beginner

.seealso: MatGetColoring()
E*/
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural"   /* one color per column */
#define MATCOLORINGSL      "sl"        /* smallest-last */
#define MATCOLORINGLF      "lf"        /* largest-first */
#define MATCOLORINGID      "id"        /* incidence-degree */

/* Computes a coloring of the matrix columns used for finite-difference Jacobians */
extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1239 
1240 /*MC
1241    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1242                                matrix package.
1243 
1244    Synopsis:
   PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,MatColoringType,ISColoring *))
1246 
1247    Not Collective
1248 
1249    Input Parameters:
1250 +  sname - name of Coloring (for example MATCOLORINGSL)
1251 .  path - location of library where creation routine is
1252 .  name - name of function that creates the Coloring type, a string
1253 -  function - function pointer that creates the coloring
1254 
1255    Level: developer
1256 
1257    If dynamic libraries are used, then the fourth input argument (function)
1258    is ignored.
1259 
1260    Sample usage:
1261 .vb
1262    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1263                "MyColor",MyColor);
1264 .ve
1265 
   Then, your coloring can be chosen with the procedural interface via
1267 $     MatColoringSetType(part,"my_color")
1268    or at runtime via the option
1269 $     -mat_coloring_type my_color
1270 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1272 
1273 .keywords: matrix, Coloring, register
1274 
1275 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1276 M*/
/* As with the other *RegisterDynamic macros: under dynamic libraries the function
   pointer is dropped and the creation routine is loaded by name instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif

extern PetscBool  MatColoringRegisterAllCalled;  /* set once MatColoringRegisterAll() has run */

extern PetscErrorCode  MatColoringRegisterAll(const char[]);
extern PetscErrorCode  MatColoringRegisterDestroy(void);
extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1288 
/*S
     MatFDColoring - Object for computing a sparse Jacobian via finite differences
        and coloring

   Level: beginner

  Concepts: coloring, sparse Jacobian, finite differences

.seealso:  MatFDColoringCreate()
S*/
typedef struct _p_MatFDColoring* MatFDColoring;

extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring);
extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
/* Set/get the user function to difference (cast to the generic PetscErrorCode (*)(void)) */
extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
/* Computes the Jacobian entries of the matrix by finite differencing with coloring */
extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
/*
    These routines are for partitioning matrices: currently used only
  for adjacency matrix, MatCreateMPIAdj().
*/

/*S
     MatPartitioning - Object for managing the partitioning of a matrix or graph

   Level: beginner

  Concepts: partitioning

.seealso:  MatPartitioningCreate(), MatPartitioningType
S*/
typedef struct _p_MatPartitioning* MatPartitioning;

/*E
    MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioning
E*/
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"    /* keep the existing distribution */
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"   /* these require PETSc be built with the named external package */
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGJOSTLE   "jostle"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGSCOTCH   "scotch"


extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
/* Computes the partitioning; the resulting IS gives the destination partition of each local row */
extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning);

/* Registers a new partitioner: (name, dynamic-library path, creation-routine name, routine) */
extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1357 
1358 /*MC
1359    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1360    matrix package.
1361 
1362    Synopsis:
1363    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1364 
1365    Not Collective
1366 
1367    Input Parameters:
1368 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1369 .  path - location of library where creation routine is
1370 .  name - name of function that creates the partitioning type, a string
1371 -  function - function pointer that creates the partitioning type
1372 
1373    Level: developer
1374 
1375    If dynamic libraries are used, then the fourth input argument (function)
1376    is ignored.
1377 
1378    Sample usage:
1379 .vb
1380    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1381                "MyPartCreate",MyPartCreate);
1382 .ve
1383 
1384    Then, your partitioner can be chosen with the procedural interface via
1385 $     MatPartitioningSetType(part,"my_part")
1386    or at runtime via the option
1387 $     -mat_partitioning_type my_part
1388 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1390 
1391 .keywords: matrix, partitioning, register
1392 
1393 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1394 M*/
/* Under dynamic libraries the creation routine is loaded by name; the pointer is dropped */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif

extern PetscBool  MatPartitioningRegisterAllCalled;  /* set once MatPartitioningRegisterAll() has run */

extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
extern PetscErrorCode  MatPartitioningRegisterDestroy(void);

extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);

/* ParMetis-specific options */
extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);

/* Jostle-specific options */
extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);

/* Chaco-specific options: global method, local refinement, and eigensolver controls */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);

/* Party-specific options: string constants naming the global strategies */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
/* Party local (refinement) strategies */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );

/* Scotch-specific options */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);

/* Builds a vertex/cell adjacency graph from a mesh (used with ParMetis-based partitioning) */
extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1456 
/*
    If you add entries here you must also add them to finclude/petscmat.h
*/
/* Identifiers for the entries of a matrix type's operation table; used with
   MatHasOperation() and MatShellSetOperation()/MatShellGetOperation().
   The explicit values are mirrored in finclude/petscmat.h, so never renumber. */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               /* NOTE(review): values 124-127 are currently unassigned -- presumably reserved; confirm before reuse */
               MATOP_GET_SUBMATRICES_PARALLEL=128
             } MatOperation;
/* Query whether a matrix type implements the given operation */
extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
/* For MATSHELL: install/retrieve a user routine implementing the given operation */
extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
extern PetscErrorCode  MatShellSetContext(Mat,void*);
1590 
/*
   Codes for matrices stored on disk. By default they are
   stored in a universal format. By changing the format with
   PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
   be stored in a way natural for the matrix, for example dense matrices
   would be stored as dense. Matrices stored this way may only be
   read into matrices of the same type.
*/
#define MATRIX_BINARY_FORMAT_DENSE -1

extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
/* For MATIS: access the sequential matrix holding this process's local part */
extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1603 
/*S
     MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace

   Level: advanced

  Concepts: matrix; linear operator, null space

  Users manual sections:
.   sec_singular

.seealso:  MatNullSpaceCreate()
S*/
typedef struct _p_MatNullSpace* MatNullSpace;

extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
/* Installs a user callback that applies the null-space removal */
extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace);
extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);

extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1630 
/* MAIJ matrices: apply an AIJ matrix to vectors with multiple degrees of freedom per node */
extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);

extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);

extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);

/* Matrix-free finite-difference (MFFD) matrices: Jacobian-vector products by differencing */
extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
extern PetscErrorCode  MatMFFDResetHHistory(Mat);
extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode  MatMFFDSetFromOptions(Mat);
extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1654 
/*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
              Jacobian vector products

    Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

           MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure

    Level: developer

.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
S*/
typedef struct _p_MatMFFD* MatMFFD;

/*E
    MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function

   Level: beginner

.seealso: MatMFFDSetType(), MatMFFDRegister()
E*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"   /* Dennis-Schnabel h selection */
#define MATMFFD_WP  "wp"   /* Walker-Pernice h selection */

extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
/* Registers a new h-computation method: (name, dynamic-library path, creation-routine name, routine) */
extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1682 
1683 /*MC
1684    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1685 
1686    Synopsis:
1687    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1688 
1689    Not Collective
1690 
1691    Input Parameters:
1692 +  name_solver - name of a new user-defined compute-h module
1693 .  path - path (either absolute or relative) the library containing this solver
1694 .  name_create - name of routine to create method context
1695 -  routine_create - routine to create method context
1696 
1697    Level: developer
1698 
1699    Notes:
1700    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1701 
1702    If dynamic libraries are used, then the fourth input argument (routine_create)
1703    is ignored.
1704 
1705    Sample usage:
1706 .vb
1707    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1708                "MyHCreate",MyHCreate);
1709 .ve
1710 
1711    Then, your solver can be chosen with the procedural interface via
1712 $     MatMFFDSetType(mfctx,"my_h")
1713    or at runtime via the option
1714 $     -snes_mf_type my_h
1715 
1716 .keywords: MatMFFD, register
1717 
1718 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1719 M*/
/* Under dynamic libraries the creation routine is loaded by name; the pointer is dropped */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
extern PetscErrorCode  MatMFFDRegisterDestroy(void);
extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);          /* option for the "ds" method */
extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool ); /* option for the "wp" method */


extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);

/*
   PETSc interface to MUMPS
*/
#ifdef PETSC_HAVE_MUMPS
extern PetscErrorCode  MatSetMumpsIcntl(Mat,PetscInt,PetscInt);
#endif

/* Nested ("block") matrices assembled from submatrices */
extern PetscErrorCode  MatCreateNest(MPI_Comm comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
extern PetscErrorCode  MatNestGetSize(Mat A,PetscInt *M,PetscInt *N);
extern PetscErrorCode  MatNestGetSubMats(Mat A,PetscInt *M,PetscInt *N,Mat ***mat);
extern PetscErrorCode  MatNestGetSubMat(Mat A,PetscInt idxm,PetscInt jdxm,Mat *sub);
extern PetscErrorCode  MatNestSetVecType(Mat,const VecType);

PETSC_EXTERN_CXX_END
#endif
1750