xref: /petsc/include/petscmat.h (revision 77c8aea5b73b50700a790636621daa7beaee9d6e)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;  /* opaque handle: struct _p_Mat is not defined in this header */
19 
20 /*J
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 J*/
29 #define MatType char*
/* ---- wrapper / index-set based formats ---- */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
/* ---- compressed sparse row (AIJ) family and its pthread/CRL/CUSP/perm variants ---- */
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
49 #define MATSHELL           "shell"
/* ---- dense storage ---- */
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
/* ---- blocked (BAIJ), symmetric blocked (SBAIJ) and BSTRM formats ---- */
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
/* ---- special-purpose and wrapper types (matrix-free, transforms, composites, external) ---- */
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 #define MATIJ              "ij"
85 
86 /*J
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 J*/
97 #define MatSolverPackage char*
/* built-in ("petsc") and external factorization packages, selected via MatGetFactor() */
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERMATLAB       "matlab"
108 #define MATSOLVERPETSC        "petsc"
109 #define MATSOLVERPLAPACK      "plapack"
110 #define MATSOLVERBAS          "bas"
111 
112 #define MATSOLVERBSTRM        "bstrm"
113 #define MATSOLVERSBSTRM       "sbstrm"
114 
115 /*E
116     MatFactorType - indicates what type of factorization is requested
117 
118     Level: beginner
119 
120    Any additions/changes here MUST also be made in include/finclude/petscmat.h
121 
122 .seealso: MatSolverPackage, MatGetFactor()
123 E*/
124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
125 extern const char *const MatFactorTypes[];  /* NOTE(review): assumed to be string names parallel to MatFactorType — confirm in the defining source */
126 
127 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
128 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
129 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
130 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
131 
132 /* Logging support */
133 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
134 extern PetscClassId  MAT_CLASSID;
135 extern PetscClassId  MAT_FDCOLORING_CLASSID;
136 extern PetscClassId  MAT_TRANSPOSECOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_COARSEN_CLASSID;
139 extern PetscClassId  MAT_NULLSPACE_CLASSID;
140 extern PetscClassId  MATMFFD_CLASSID;
141 
142 /*E
143     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
144      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
145      that the input matrix is to be replaced with the converted matrix.
146 
147     Level: beginner
148 
149    Any additions/changes here MUST also be made in include/finclude/petscmat.h
150 
151 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
152 E*/
153 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* create new / reuse previously obtained / skip — see doc block above */
154 
155 /*E
156     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
157      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
158 
159     Level: beginner
160 
161 .seealso: MatGetSeqNonzerostructure()
162 E*/
163 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether extracted submatrices include the numerical values or only the nonzero structure */
164 
165 extern PetscErrorCode  MatInitializePackage(const char[]);
166 
167 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
168 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
169 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
170 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
171 extern PetscErrorCode  MatSetType(Mat,const MatType);
172 extern PetscErrorCode  MatSetFromOptions(Mat);
173 extern PetscErrorCode  MatRegisterAll(const char[]);
174 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
175 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
176 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
179 
180 /*MC
181    MatRegisterDynamic - Adds a new matrix type
182 
183    Synopsis:
184    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
185 
186    Not Collective
187 
188    Input Parameters:
189 +  name - name of a new user-defined matrix type
190 .  path - path (either absolute or relative) of the library containing this solver
191 .  name_create - name of routine to create method context
192 -  routine_create - routine to create method context
193 
194    Notes:
195    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
196 
197    If dynamic libraries are used, then the fourth input argument (routine_create)
198    is ignored.
199 
200    Sample usage:
201 .vb
202    MatRegisterDynamic("my_mat","/home/username/my_lib/lib/libO/solaris/mylib.a",
203                "MyMatCreate",MyMatCreate);
204 .ve
205 
206    Then, your solver can be chosen with the procedural interface via
207 $     MatSetType(Mat,"my_mat")
208    or at runtime via the option
209 $     -mat_type my_mat
210 
211    Level: advanced
212 
213    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214          If your function is not being put into a shared library then use MatRegister() instead
215 
216 .keywords: Mat, register
217 
218 .seealso: MatRegisterAll(), MatRegisterDestroy()
219 
220 M*/
/* With dynamic libraries the creation routine is loaded by name from the given
   library, so the function-pointer argument is dropped (see Notes above). */
221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
223 #else
224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
225 #endif
226 
227 extern PetscBool  MatRegisterAllCalled;
228 extern PetscFList MatList;
229 extern PetscFList MatColoringList;
230 extern PetscFList MatPartitioningList;
231 extern PetscFList MatCoarsenList;
232 
233 /*E
234     MatStructure - Indicates if the matrix has the same nonzero structure
235 
236     Level: beginner
237 
238    Any additions/changes here MUST also be made in include/finclude/petscmat.h
239 
240 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
241 E*/
242 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;  /* relationship between the nonzero patterns of two matrices; see MatCopy(), KSPSetOperators() */
243 
244 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
246 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
250 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
253 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
254 extern PetscErrorCode  MatCreateAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
255 PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
258 PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
259 PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
260 PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
261 PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
262 PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
265 PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
266 PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
267 PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
268 PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
269 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
270 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
271 
272 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
276 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
279 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
280 extern PetscErrorCode  MatCreateBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
281 PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
284 PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
285 PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
286 PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
287 PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
288 PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
291 PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
292 PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
293 PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
294 PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
295 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
296 
297 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
298 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
302 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
305 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
306 
307 extern PetscErrorCode  MatCreateSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
308 PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
311 PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
312 PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
313 PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
314 PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
315 PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
318 PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
319 PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
320 PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
321 PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
322 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
323 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
324 extern PetscErrorCode  MatXAIJSetPreallocation(Mat,PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*);
325 
326 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
327 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
328 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
329 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
330 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
331 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
332 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
333 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
334 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
335 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
336 
337 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
340 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
341 
342 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
343 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
344 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
345 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
346 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
347 extern PetscErrorCode  MatCompositeMerge(Mat);
348 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/*E
    MatCompositeType - indicates whether the matrices in a MATCOMPOSITE are
     combined additively or multiplicatively

    Level: advanced

.seealso: MatCreateComposite(), MatCompositeSetType()
E*/
349 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
350 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
351 
352 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
353 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
354 
355 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
356 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
357 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
358 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
359 
360 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
361 
362 extern PetscErrorCode  MatSetUp(Mat);
363 extern PetscErrorCode  MatDestroy(Mat*);
364 
365 extern PetscErrorCode  MatConjugate(Mat);
366 extern PetscErrorCode  MatRealPart(Mat);
367 extern PetscErrorCode  MatImaginaryPart(Mat);
368 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
369 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
370 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
371 
372 /* ------------------------------------------------------------*/
373 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
374 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
375 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
376 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
377 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
378 
379 /*S
380      MatStencil - Data structure (C struct) for storing information about a single row or
381         column of a matrix as indexed on an associated grid.
382 
383    Fortran usage is different, see MatSetValuesStencil() for details.
384 
385    Level: beginner
386 
387   Concepts: matrix; linear operator
388 
389 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
390 S*/
391 typedef struct {
392   PetscInt k,j,i,c;  /* grid indices (k,j,i) and component number c of the grid point; see MatSetValuesStencil() */
393 } MatStencil;
394 
395 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
396 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
397 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
398 
399 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
400 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
401 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
402 
403 /*E
404     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
405      to continue to add values to it
406 
407     Level: beginner
408 
409 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
410 E*/
411 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;  /* NOTE(review): enumerator values are pinned explicitly — presumably relied upon elsewhere (e.g. Fortran interface); confirm before renumbering */
412 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
413 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
414 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
415 
416 
417 
418 /*E
419     MatOption - Options that may be set for a matrix and its behavior or storage
420 
421     Level: beginner
422 
423    Any additions/changes here MUST also be made in include/finclude/petscmat.h
424 
425 .seealso: MatSetOption()
426 E*/
427 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
428               MAT_SYMMETRIC,
429               MAT_STRUCTURALLY_SYMMETRIC,
430               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
431               MAT_NEW_NONZERO_LOCATION_ERR,
432               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
433               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
434               MAT_USE_INODES,
435               MAT_HERMITIAN,
436               MAT_SYMMETRY_ETERNAL,
437               MAT_CHECK_COMPRESSED_ROW,
438               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
439               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
440               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
441               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS is a sentinel count of the options above; keep it last */
442 extern const char *MatOptions[];  /* NOTE(review): assumed to be string names parallel to MatOption — confirm in the defining source */
443 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
444 extern PetscErrorCode  MatGetType(Mat,const MatType*);
445 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
446 
447 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
448 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
449 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
450 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
451 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
452 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
453 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
454 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
455 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
456 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
457 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
458 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
459 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
460 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
461 extern PetscErrorCode MatSetNThreads(Mat,PetscInt);
462 extern PetscErrorCode MatGetNThreads(Mat,PetscInt*);
463 
464 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
465 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
466 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
467 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
468 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
469 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
470 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
471 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
472 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
473 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
474 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
475 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
476 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
477 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
478 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
479 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
480 
481 /*E
482     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
483   its numerical values copied over or just its nonzero structure.
484 
485     Level: beginner
486 
487    Any additions/changes here MUST also be made in include/finclude/petscmat.h
488 
489 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
490 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
491 $                               have several matrices with the same nonzero pattern.
492 
493 .seealso: MatDuplicate()
494 E*/
495 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* see doc block above, especially the MAT_SHARE_NONZERO_PATTERN semantics */
496 
497 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
498 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
499 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
500 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
501 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
502 
503 
504 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
505 extern PetscErrorCode  MatView(Mat,PetscViewer);
506 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
507 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
508 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
509 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
510 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
511 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
512 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
513 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
514 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
515 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
516 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
517 
518 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
519 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
520 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
521 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
522 
523 /*S
524      MatInfo - Context of matrix information, used with MatGetInfo()
525 
526    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
527 
528    Level: intermediate
529 
530   Concepts: matrix^nonzero information
531 
532 .seealso:  MatGetInfo(), MatInfoType
533 S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size of the matrix storage format */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, and allocated-but-unused */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: user-supplied estimate vs. what was actually needed */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
543 
544 /*E
545     MatInfoType - Indicates if you want information about the local part of the matrix,
546      the entire parallel matrix or the maximum over all the local parts.
547 
548     Level: beginner
549 
550    Any additions/changes here MUST also be made in include/finclude/petscmat.h
551 
552 .seealso: MatGetInfo(), MatInfo
553 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; /* scope of the statistics returned by MatGetInfo(): this process, max over processes, or sum over processes */
555 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
556 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
557 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
558 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
559 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
560 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
561 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
562 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
563 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
564 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
565 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
566 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
567 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
568 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
569 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
570 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
571 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
572 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
573 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
574 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
575 
576 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
577 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
578 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
579 extern PetscErrorCode  MatZeroEntries(Mat);
580 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
581 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
582 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
583 extern PetscErrorCode  MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec);
584 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
585 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
586 
587 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
588 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
589 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
590 
591 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
592 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
593 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
594 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
595 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
596 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
597 
598 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
599 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
600 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
601 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
602 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
603 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
604 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
605 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
606 
607 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
608 extern PetscErrorCode  MatMergeSymbolic(MPI_Comm,Mat,PetscInt,Mat*);
609 extern PetscErrorCode  MatMergeNumeric(MPI_Comm,Mat,PetscInt,Mat);
610 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
611 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
612 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
613 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
614 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
615 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*);
616 #if defined (PETSC_USE_CTABLE)
617 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
618 #else
619 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
620 #endif
621 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
622 
623 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
624 
625 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
626 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
627 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
628 
629 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
630 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
631 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
632 extern PetscErrorCode  MatRARt(Mat,Mat,MatReuse,PetscReal,Mat*);
633 extern PetscErrorCode  MatRARtSymbolic(Mat,Mat,PetscReal,Mat*);
634 extern PetscErrorCode  MatRARtNumeric(Mat,Mat,Mat);
635 
extern PetscErrorCode  MatTransposeMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
/* NOTE(review): the two declarations below spell "Transposet" while the driver above spells
   "Transpose" -- this looks like a typo in the public names, but renaming here would break any
   existing callers; confirm against the implementation before changing */
extern PetscErrorCode  MatTransposetMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatTransposetMatMultNumeric(Mat,Mat,Mat);
extern PetscErrorCode  MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatTransposeMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatTransposeMultNumeric(Mat,Mat,Mat);
642 
643 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
644 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
645 
646 extern PetscErrorCode  MatScale(Mat,PetscScalar);
647 extern PetscErrorCode  MatShift(Mat,PetscScalar);
648 
649 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
650 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
651 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
652 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
653 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
654 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
655 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
656 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
657 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
658 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
659 
660 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
661 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
662 
663 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
664 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
665 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
666 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
667 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
668 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
669 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*);
670 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
671 
672 /*MC
673    MatSetValue - Set a single entry into a matrix.
674 
675    Not collective
676 
677    Input Parameters:
678 +  m - the matrix
679 .  row - the row location of the entry
680 .  col - the column location of the entry
681 .  value - the value to insert
682 -  mode - either INSERT_VALUES or ADD_VALUES
683 
684    Notes:
685    For efficiency one should use MatSetValues() and set several or many
686    values simultaneously if possible.
687 
688    Level: beginner
689 
690 .seealso: MatSetValues(), MatSetValueLocal()
691 M*/
692 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
693 
/* MatGetValue - retrieves a single entry (i,j) into *va; thin wrapper over MatGetValues() */
PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
695 
/* MatSetValueLocal - sets a single entry addressed by local indices; thin wrapper over MatSetValuesLocal() */
PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
697 
698 /*MC
699    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
700        row in a matrix providing the data that one can use to correctly preallocate the matrix.
701 
702    Synopsis:
703    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
704 
705    Collective on MPI_Comm
706 
707    Input Parameters:
708 +  comm - the communicator that will share the eventually allocated matrix
709 .  nrows - the number of LOCAL rows in the matrix
710 -  ncols - the number of LOCAL columns in the matrix
711 
712    Output Parameters:
713 +  dnz - the array that will be passed to the matrix preallocation routines
714 -  ozn - the other array passed to the matrix preallocation routines
715 
716 
717    Level: intermediate
718 
719    Notes:
720     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details.
721 
722    Do not malloc or free dnz and onz, that is handled internally by these routines
723 
724    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
725 
726    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
727 
728   Concepts: preallocation^Matrix
729 
730 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
731           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
732 M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  /* allocate and zero the diagonal-block (dnz) and off-diagonal-block (onz) per-row counters */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* prefix sums over comm give this process's global column range [__start,__end) and first global row __rstart */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
741 
742 /*MC
743    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
744        row in a matrix providing the data that one can use to correctly preallocate the matrix.
745 
746    Synopsis:
747    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
748 
749    Collective on MPI_Comm
750 
751    Input Parameters:
752 +  comm - the communicator that will share the eventually allocated matrix
753 .  nrows - the number of LOCAL rows in the matrix
754 -  ncols - the number of LOCAL columns in the matrix
755 
756    Output Parameters:
757 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
765 
766    Do not malloc or free dnz and onz, that is handled internally by these routines
767 
768    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
769 
770   Concepts: preallocation^Matrix
771 
772 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
773           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
774 M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  /* allocate and zero the diagonal-block (dnz) and off-diagonal-block (onz) per-row counters */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* prefix sums give the exclusive end of this process's column range (__end) and first global row (__rstart); unlike MatPreallocateInitialize() no __start is needed by the symmetric Set macro */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
783 
784 /*MC
785    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
786        inserted using a local number of the rows and columns
787 
788    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
808 
809    Do not malloc or free dnz and onz, that is handled internally by these routines
810 
811   Concepts: preallocation^Matrix
812 
813 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
814           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
815 M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  /* translate local row/column indices to global IN PLACE (rows and cols are overwritten) */ \
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* count each row's columns via MatPreallocateSet(), which uses variables from MatPreallocateInitialize() */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
825 
826 /*MC
827    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
828        inserted using a local number of the rows and columns
829 
830    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
849 
850    Do not malloc or free dnz and onz that is handled internally by these routines
851 
852   Concepts: preallocation^Matrix
853 
854 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
855           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
856 M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  /* translate local row/column indices to global IN PLACE (rows and cols are overwritten); one map serves both since the matrix is symmetric */ \
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* count each row's columns via MatPreallocateSymmetricSet(), which uses variables from MatPreallocateSymmetricInitialize() */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
866 
867 /*MC
   MatPreallocateSet - Indicates the locations (a row and its columns, in global numbering) in the matrix where nonzeros will be
       inserted

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
873 
874    Not Collective
875 
876    Input Parameters:
877 +  row - the row
878 .  ncols - the number of columns in the matrix
879 -  cols - the columns indicated
880 
881    Output Parameters:
882 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
890 
891    Do not malloc or free dnz and onz that is handled internally by these routines
892 
893    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
894 
895   Concepts: preallocation^Matrix
896 
897 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
898           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
899 M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  /* row must lie inside this process's ownership range [__rstart,__rstart+__nrows) computed by MatPreallocateInitialize() */ \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  /* columns inside [__start,__end) are counted in the diagonal block (dnz), all others in the off-diagonal block (onz) */ \
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
909 
910 /*MC
   MatPreallocateSymmetricSet - Indicates the locations (a row and its columns, in global numbering) in a symmetric matrix where nonzeros will be
       inserted

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
916 
917    Not Collective
918 
919    Input Parameters:
920 +  nrows - the number of rows indicated
921 .  rows - the indices of the rows
922 .  ncols - the number of columns in the matrix
923 .  cols - the columns indicated
924 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
932 
933    Do not malloc or free dnz and onz that is handled internally by these routines
934 
935    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
936 
937   Concepts: preallocation^Matrix
938 
939 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
940           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
941 M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  /* only the upper triangle (cols[__i] >= row) is counted; columns at or past __end go to the off-diagonal counter, the rest to the diagonal counter */ \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
949 
950 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
955 
956    Not Collective
957 
958    Input Parameters:
959 .  A - matrix
960 .  row - row where values exist (must be local to this process)
961 .  ncols - number of columns
962 .  cols - columns with nonzeros
963 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
971 
972    Do not malloc or free dnz and onz that is handled internally by these routines
973 
974    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
975 
976   Concepts: preallocation^Matrix
977 
978 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
979           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
980 M*/
/* if A already exists, register the locations directly via MatSetValues() (values pointer PETSC_NULL); otherwise just count them with MatPreallocateSet() */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
982 
983 
984 /*MC
985    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
986        row in a matrix providing the data that one can use to correctly preallocate the matrix.
987 
988    Synopsis:
989    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
990 
991    Collective on MPI_Comm
992 
993    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
1002 
1003    Do not malloc or free dnz and onz that is handled internally by these routines
1004 
1005    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
1006 
1007   Concepts: preallocation^Matrix
1008 
1009 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1010           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1011 M*/
/* frees the dnz/onz arrays and closes the scope opened by MatPreallocateInitialize()/MatPreallocateSymmetricInitialize() */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1013 
1014 
1015 
1016 /* Routines unique to particular data structures */
1017 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1018 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1019 
1020 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1021 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1022 
1023 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1024 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1025 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1026 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1027 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1028 extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool);
1029 
1030 #define MAT_SKIP_ALLOCATION -4
1031 
1032 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1033 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1034 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1035 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1036 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1037 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1038 
1039 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1040 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1041 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1042 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1043 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1044 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1045 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1046 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1047 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1048 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1049 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1050 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1051 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1052 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1053 extern PetscErrorCode  MatMPIAdjCreateNonemptySubcommMat(Mat,Mat*);
1054 
1055 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1056 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1057 
1058 extern PetscErrorCode  MatStoreValues(Mat);
1059 extern PetscErrorCode  MatRetrieveValues(Mat);
1060 
1061 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1062 
1063 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1064 /*
1065   These routines are not usually accessed directly, rather solving is
1066   done through the KSP and PC interfaces.
1067 */
1068 
1069 /*J
1070     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1071        with an optional dynamic library name, for example
1072        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1073 
1074    Level: beginner
1075 
1076    Cannot use const because the PC objects manipulate the string
1077 
1078 .seealso: MatGetOrdering()
1079 J*/
1080 #define MatOrderingType char*
1081 #define MATORDERINGNATURAL     "natural"
1082 #define MATORDERINGND          "nd"
1083 #define MATORDERING1WD         "1wd"
1084 #define MATORDERINGRCM         "rcm"
1085 #define MATORDERINGQMD         "qmd"
1086 #define MATORDERINGROWLENGTH   "rowlength"
1087 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1088 
1089 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1090 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1091 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1092 
1093 /*MC
1094    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1095 
1096    Synopsis:
1097    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1098 
1099    Not Collective
1100 
1101    Input Parameters:
1102 +  sname - name of ordering (for example MATORDERINGND)
1103 .  path - location of library where creation routine is
1104 .  name - name of function that creates the ordering type,a string
1105 -  function - function pointer that creates the ordering
1106 
1107    Level: developer
1108 
1109    If dynamic libraries are used, then the fourth input argument (function)
1110    is ignored.
1111 
1112    Sample usage:
1113 .vb
1114    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1115                "MyOrder",MyOrder);
1116 .ve
1117 
1118    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1120    or at runtime via the option
1121 $     -pc_factor_mat_ordering_type my_order
1122 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1124 
1125 .keywords: matrix, ordering, register
1126 
1127 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1128 M*/
/* With dynamic libraries the creation-function pointer (4th argument) is dropped and
   replaced by 0 -- the routine is located by name at load time instead; without
   dynamic libraries the function pointer is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1134 
1135 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1136 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1137 extern PetscBool  MatOrderingRegisterAllCalled;
1138 extern PetscFList MatOrderingList;
1139 
1140 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1141 
/*S
    MatFactorShiftType - Type of numeric shift applied during factorization to
    avoid zero pivots: none, shift to force nonzero pivots, shift toward
    positive definiteness, or shift within blocks. Stored (as a PetscReal)
    in the shifttype field of MatFactorInfo.

   Level: beginner

S*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[]; /* NOTE(review): presumably string names indexed by the enum values above -- confirm against the defining .c file */
1150 
1151 /*S
1152    MatFactorInfo - Data passed into the matrix factorization routines
1153 
1154    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1155 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1156 
1157    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1158 
1159       You can use MatFactorInfoInitialize() to set default values.
1160 
1161    Level: developer
1162 
1163 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1164           MatFactorInfoInitialize()
1165 
1166 S*/
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* NOTE(review): presumably nonzero to enable drop-tolerance (dt) factorization; boolean stored as PetscReal -- confirm */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots; holds a MatFactorShiftType value as a PetscReal */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1181 
1182 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1183 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1184 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1185 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1186 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1187 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1188 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1189 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1190 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1191 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1192 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1193 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1194 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1195 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1196 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1197 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1198 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1199 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1200 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1201 
1202 extern PetscErrorCode  MatSetUnfactored(Mat);
1203 
1204 /*E
1205     MatSORType - What type of (S)SOR to perform
1206 
1207     Level: beginner
1208 
1209    May be bitwise ORd together
1210 
1211    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1212 
1213    MatSORType may be bitwise ORd together, so do not change the numbers
1214 
1215 .seealso: MatSOR()
1216 E*/
1217 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1218               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1219               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1220               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1221 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1222 
1223 /*
1224     These routines are for efficiently computing Jacobians via finite differences.
1225 */
1226 
1227 /*J
1228     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1229        with an optional dynamic library name, for example
1230        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1231 
1232    Level: beginner
1233 
1234 .seealso: MatGetColoring()
1235 J*/
1236 #define MatColoringType char*
1237 #define MATCOLORINGNATURAL "natural"
1238 #define MATCOLORINGSL      "sl"
1239 #define MATCOLORINGLF      "lf"
1240 #define MATCOLORINGID      "id"
1241 
1242 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1243 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1244 
1245 /*MC
1246    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1247                                matrix package.
1248 
1249    Synopsis:
1250    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1251 
1252    Not Collective
1253 
1254    Input Parameters:
1255 +  sname - name of Coloring (for example MATCOLORINGSL)
1256 .  path - location of library where creation routine is
1257 .  name - name of function that creates the Coloring type, a string
1258 -  function - function pointer that creates the coloring
1259 
1260    Level: developer
1261 
1262    If dynamic libraries are used, then the fourth input argument (function)
1263    is ignored.
1264 
1265    Sample usage:
1266 .vb
1267    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1268                "MyColor",MyColor);
1269 .ve
1270 
1271    Then, your partitioner can be chosen with the procedural interface via
1272 $     MatColoringSetType(part,"my_color")
1273    or at runtime via the option
1274 $     -mat_coloring_type my_color
1275 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1277 
1278 .keywords: matrix, Coloring, register
1279 
1280 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1281 M*/
/* With dynamic libraries the creation-function pointer (4th argument) is dropped and
   replaced by 0 -- the routine is located by name at load time instead; without
   dynamic libraries the function pointer is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1287 
1288 extern PetscBool  MatColoringRegisterAllCalled;
1289 
1290 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1291 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1292 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1293 
1294 /*S
1295      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1296         and coloring
1297 
1298    Level: beginner
1299 
1300   Concepts: coloring, sparse Jacobian, finite differences
1301 
1302 .seealso:  MatFDColoringCreate()
1303 S*/
1304 typedef struct _p_MatFDColoring* MatFDColoring;
1305 
1306 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1307 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1308 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1309 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1310 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1311 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1312 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1313 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1314 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1315 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1316 
1317 /*S
1318      MatTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring
1319 
1320    Level: beginner
1321 
1322   Concepts: coloring, sparse matrix product
1323 
1324 .seealso:  MatTransposeColoringCreate()
1325 S*/
1326 typedef struct _p_MatTransposeColoring* MatTransposeColoring;
1327 
1328 extern PetscErrorCode MatTransposeColoringCreate(Mat,ISColoring,MatTransposeColoring *);
1329 extern PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring,Mat,Mat);
1330 extern PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring,Mat,Mat);
1331 extern PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*);
1332 
1333 /*
1334     These routines are for partitioning matrices: currently used only
1335   for adjacency matrix, MatCreateMPIAdj().
1336 */
1337 
1338 /*S
1339      MatPartitioning - Object for managing the partitioning of a matrix or graph
1340 
1341    Level: beginner
1342 
1343   Concepts: partitioning
1344 
1345 .seealso:  MatPartitioningCreate(), MatPartitioningType
1346 S*/
1347 typedef struct _p_MatPartitioning* MatPartitioning;
1348 
1349 /*J
1350     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1351        with an optional dynamic library name, for example
1352        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1353 
   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioning
1357 J*/
1358 #define MatPartitioningType char*
1359 #define MATPARTITIONINGCURRENT  "current"
1360 #define MATPARTITIONINGSQUARE   "square"
1361 #define MATPARTITIONINGPARMETIS "parmetis"
1362 #define MATPARTITIONINGCHACO    "chaco"
1363 #define MATPARTITIONINGPARTY    "party"
1364 #define MATPARTITIONINGPTSCOTCH "ptscotch"
1365 
1366 
1367 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1368 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1369 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1370 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1371 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1372 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1373 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1374 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1375 
1376 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1377 
1378 /*MC
1379    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1380    matrix package.
1381 
1382    Synopsis:
1383    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1384 
1385    Not Collective
1386 
1387    Input Parameters:
1388 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1389 .  path - location of library where creation routine is
1390 .  name - name of function that creates the partitioning type, a string
1391 -  function - function pointer that creates the partitioning type
1392 
1393    Level: developer
1394 
1395    If dynamic libraries are used, then the fourth input argument (function)
1396    is ignored.
1397 
1398    Sample usage:
1399 .vb
1400    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1401                "MyPartCreate",MyPartCreate);
1402 .ve
1403 
1404    Then, your partitioner can be chosen with the procedural interface via
1405 $     MatPartitioningSetType(part,"my_part")
1406    or at runtime via the option
1407 $     -mat_partitioning_type my_part
1408 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1410 
1411 .keywords: matrix, partitioning, register
1412 
1413 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1414 M*/
/* With dynamic libraries the creation-function pointer (4th argument) is dropped and
   replaced by 0 -- the routine is located by name at load time instead; without
   dynamic libraries the function pointer is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1420 
1421 extern PetscBool  MatPartitioningRegisterAllCalled;
1422 
1423 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1424 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1425 
1426 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1427 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1428 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1429 
1430 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1431 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1432 
/* Global partitioning methods offered by the Chaco partitioner
   (note: the value 3 is intentionally unused). */
typedef enum {
  MP_CHACO_MULTILEVEL = 1,
  MP_CHACO_SPECTRAL   = 2,
  MP_CHACO_LINEAR     = 4,
  MP_CHACO_RANDOM     = 5,
  MP_CHACO_SCATTERED  = 6
} MPChacoGlobalType;
extern const char *MPChacoGlobalTypes[];
/* Local refinement methods offered by Chaco. */
typedef enum {
  MP_CHACO_KERNIGHAN = 1,
  MP_CHACO_NONE      = 2
} MPChacoLocalType;
extern const char *MPChacoLocalTypes[];
/* Eigensolver selection for Chaco's spectral method. */
typedef enum {
  MP_CHACO_LANCZOS = 0,
  MP_CHACO_RQI     = 1
} MPChacoEigenType;
extern const char *MPChacoEigenTypes[];
1439 
1440 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
1441 extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
1442 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
1443 extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
1444 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1445 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1446 extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
1447 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
1448 extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
1449 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
1450 extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1451 
1452 #define MP_PARTY_OPT "opt"
1453 #define MP_PARTY_LIN "lin"
1454 #define MP_PARTY_SCA "sca"
1455 #define MP_PARTY_RAN "ran"
1456 #define MP_PARTY_GBF "gbf"
1457 #define MP_PARTY_GCF "gcf"
1458 #define MP_PARTY_BUB "bub"
1459 #define MP_PARTY_DEF "def"
1460 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
1461 #define MP_PARTY_HELPFUL_SETS "hs"
1462 #define MP_PARTY_KERNIGHAN_LIN "kl"
1463 #define MP_PARTY_NONE "no"
1464 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
1465 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1466 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
1467 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1468 
/* Strategy selection for the PT-Scotch partitioner. */
typedef enum {
  MP_PTSCOTCH_QUALITY,      /* 0 */
  MP_PTSCOTCH_SPEED,        /* 1 */
  MP_PTSCOTCH_BALANCE,      /* 2 */
  MP_PTSCOTCH_SAFETY,       /* 3 */
  MP_PTSCOTCH_SCALABILITY   /* 4 */
} MPPTScotchStrategyType;
extern const char *MPPTScotchStrategyTypes[];
1471 
1472 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
1473 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
1474 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
1475 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);
1476 
1477 /*
1478     These routines are for coarsening matrices:
1479 */
1480 
1481 /*S
1482      MatCoarsen - Object for managing the coarsening of a graph (symmetric matrix)
1483 
1484    Level: beginner
1485 
1486   Concepts: coarsen
1487 
.seealso:  MatCoarsenCreate(), MatCoarsenType
1489 S*/
1490 typedef struct _p_MatCoarsen* MatCoarsen;
1491 
1492 /*J
1493     MatCoarsenType - String with the name of a PETSc matrix coarsen or the creation function
1494        with an optional dynamic library name, for example
1495        http://www.mcs.anl.gov/petsc/lib.a:coarsencreate()
1496 
   Level: beginner

.seealso: MatCoarsenCreate(), MatCoarsen
1500 J*/
1501 #define MatCoarsenType char*
1502 #define MATCOARSENMIS  "mis"
1503 #define MATCOARSENHEM  "hem"
1504 
/* linked list for aggregates: singly-linked node holding one global index */
typedef struct _LLIntNode{
  struct _LLIntNode *next;
  PetscInt   gid;          /* global index stored in this node */
}LLIntNode;

/* only used by node pool: one chunk of LLIntNode storage, chained so all
   chunks can be traversed/freed together */
typedef struct _LLArrNode{
  struct _LLArrNode *next;   /* next chunk in the pool */
  struct _LLIntNode *array;  /* contiguous node storage for this chunk;
                                NOTE(review): length presumably PetscCoarsenData.chk_sz -- confirm */
}LLArrNode;
1516 
/* Array of linked lists, pool-allocated in chunks; used by MatCoarsenGetData()
   to return aggregate information. */
typedef struct _PetscCoarsenData{
  /* node pool */
  LLArrNode  pool_list;      /* head of the chain of allocated chunks */
  LLIntNode *new_node;       /* NOTE(review): presumably the next unused node in the current chunk -- confirm */
  PetscInt   new_left;       /* NOTE(review): presumably number of unused nodes left in the current chunk -- confirm */
  PetscInt   chk_sz;         /* chunk size: nodes allocated per LLArrNode */
  LLIntNode *extra_nodes;    /* NOTE(review): presumably recycled nodes available for reuse -- confirm */
  /* Array of lists */
  LLIntNode **array;         /* array[i] is the head of list i */
  PetscInt size;             /* number of lists in array */
  /* cache a Mat for communication data */
  Mat mat;
}PetscCoarsenData;
1530 
1531 extern PetscErrorCode  MatCoarsenCreate(MPI_Comm,MatCoarsen*);
1532 extern PetscErrorCode  MatCoarsenSetType(MatCoarsen,const MatCoarsenType);
1533 extern PetscErrorCode  MatCoarsenSetAdjacency(MatCoarsen,Mat);
1534 extern PetscErrorCode  MatCoarsenSetGreedyOrdering(MatCoarsen,const IS);
1535 extern PetscErrorCode  MatCoarsenSetStrictAggs(MatCoarsen,PetscBool);
1536 extern PetscErrorCode  MatCoarsenSetVerbose(MatCoarsen,PetscInt);
1537 extern PetscErrorCode  MatCoarsenGetData( MatCoarsen, PetscCoarsenData ** );
1538 extern PetscErrorCode  MatCoarsenApply(MatCoarsen);
1539 extern PetscErrorCode  MatCoarsenDestroy(MatCoarsen*);
1540 
1541 extern PetscErrorCode  MatCoarsenRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatCoarsen));
1542 
1543 /*MC
1544    MatCoarsenRegisterDynamic - Adds a new sparse matrix coarsen to the
1545    matrix package.
1546 
1547    Synopsis:
1548    PetscErrorCode MatCoarsenRegisterDynamic(const char *name_coarsen,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatCoarsen))
1549 
1550    Not Collective
1551 
1552    Input Parameters:
1553 +  sname - name of coarsen (for example MATCOARSENMIS)
1554 .  path - location of library where creation routine is
1555 .  name - name of function that creates the coarsen type, a string
1556 -  function - function pointer that creates the coarsen type
1557 
1558    Level: developer
1559 
1560    If dynamic libraries are used, then the fourth input argument (function)
1561    is ignored.
1562 
1563    Sample usage:
1564 .vb
1565    MatCoarsenRegisterDynamic("my_agg",/home/username/my_lib/lib/libO/solaris/mylib.a,
1566                "MyAggCreate",MyAggCreate);
1567 .ve
1568 
1569    Then, your aggregator can be chosen with the procedural interface via
1570 $     MatCoarsenSetType(agg,"my_agg")
1571    or at runtime via the option
1572 $     -mat_coarsen_type my_agg
1573 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1575 
1576 .keywords: matrix, coarsen, register
1577 
1578 .seealso: MatCoarsenRegisterDestroy(), MatCoarsenRegisterAll()
1579 M*/
/* With dynamic libraries the creation-function pointer (4th argument) is dropped and
   replaced by 0 -- the routine is located by name at load time instead; without
   dynamic libraries the function pointer is registered directly. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,0)
#else
#define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,d)
#endif
1585 
1586 extern PetscBool  MatCoarsenRegisterAllCalled;
1587 
1588 extern PetscErrorCode  MatCoarsenRegisterAll(const char[]);
1589 extern PetscErrorCode  MatCoarsenRegisterDestroy(void);
1590 
1591 extern PetscErrorCode  MatCoarsenView(MatCoarsen,PetscViewer);
1592 extern PetscErrorCode  MatCoarsenSetFromOptions(MatCoarsen);
1593 extern PetscErrorCode  MatCoarsenGetType(MatCoarsen,const MatCoarsenType*);
1594 
1595 
1596 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1597 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1598 
/*
    MatOperation - indices into the matrix function (operation) table; used with
    MatHasOperation(), MatShellSetOperation() and MatShellGetOperation().

    If you add entries here you must also add them to finclude/petscmat.h
    NOTE(review): the numeric values appear in finclude/petscmat.h as well,
    so keep the numbering in sync and do not renumber existing entries.
*/
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               /* NOTE(review): 124, 126 and 127 are unused -- presumably retired
                  slots; do not reuse without checking finclude/petscmat.h */
               MATOP_GETCOLUMNNORMS=125,
               MATOP_GET_SUBMATRICES_PARALLEL=128,
               MATOP_SET_VALUES_BATCH=129,
               MATOP_TRANSPOSEMATMULT=130,
               MATOP_TRANSPOSEMATMULT_SYMBOLIC=131,
               MATOP_TRANSPOSEMATMULT_NUMERIC=132,
               MATOP_TRANSPOSECOLORING_CREATE=133,
               MATOP_TRANSCOLORING_APPLY_SPTODEN=134,
               MATOP_TRANSCOLORING_APPLY_DENTOSP=135,
               MATOP_RARt=136,
               MATOP_RARt_SYMBOLIC=137,
               MATOP_RARt_NUMERIC=138
             } MatOperation;
1739 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1740 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1741 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1742 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1743 
1744 /*
1745    Codes for matrices stored on disk. By default they are
1746    stored in a universal format. By changing the format with
1747    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1748    be stored in a way natural for the matrix, for example dense matrices
1749    would be stored as dense. Matrices stored this way may only be
1750    read into matrices of the same type.
1751 */
1752 #define MATRIX_BINARY_FORMAT_DENSE -1
1753 
1754 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1755 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1756 extern PetscErrorCode  MatISSetLocalMat(Mat,Mat);
1757 
1758 /*S
1759      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1761 
1762    Level: advanced
1763 
1764   Concepts: matrix; linear operator, null space
1765 
1766   Users manual sections:
1767 .   sec_singular
1768 
1769 .seealso:  MatNullSpaceCreate()
1770 S*/
1771 typedef struct _p_MatNullSpace* MatNullSpace;
1772 
1773 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1774 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1775 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1776 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1777 extern PetscErrorCode  MatGetNullSpace(Mat, MatNullSpace *);
1778 extern PetscErrorCode  MatSetNullSpace(Mat,MatNullSpace);
1779 extern PetscErrorCode  MatSetNearNullSpace(Mat,MatNullSpace);
1780 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1781 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1782 
1783 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1784 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1785 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1786 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1787 
1788 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1789 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1790 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1791 
1792 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1793 
1794 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1795 
1796 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1797 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1798 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1799 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1800 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1801 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1802 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1803 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1804 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1805 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1806 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1807 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1808 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1809 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1810 
1811 /*S
1812     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1813               Jacobian vector products
1814 
1815     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1816 
1817            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1818 
1819     Level: developer
1820 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1822 S*/
1823 typedef struct _p_MatMFFD* MatMFFD;
1824 
1825 /*J
1826     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1827 
1828    Level: beginner
1829 
1830 .seealso: MatMFFDSetType(), MatMFFDRegister()
1831 J*/
1832 #define MatMFFDType char*
1833 #define MATMFFD_DS  "ds"
1834 #define MATMFFD_WP  "wp"
1835 
1836 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1837 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1838 
1839 /*MC
1840    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1841 
1842    Synopsis:
1843    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1844 
1845    Not Collective
1846 
1847    Input Parameters:
1848 +  name_solver - name of a new user-defined compute-h module
1849 .  path - path (either absolute or relative) of the library containing this solver
1850 .  name_create - name of routine to create method context
1851 -  routine_create - routine to create method context
1852 
1853    Level: developer
1854 
1855    Notes:
1856    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1857 
1858    If dynamic libraries are used, then the fourth input argument (routine_create)
1859    is ignored.
1860 
1861    Sample usage:
1862 .vb
1863    MatMFFDRegisterDynamic("my_h","/home/username/my_lib/lib/libO/solaris/mylib.a",
1864                "MyHCreate",MyHCreate);
1865 .ve
1866 
1867    Then, your solver can be chosen with the procedural interface via
1868 $     MatMFFDSetType(mfctx,"my_h")
1869    or at runtime via the option
1870 $     -snes_mf_type my_h
1871 
1872 .keywords: MatMFFD, register
1873 
1874 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1875 M*/
1876 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1877 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) /* routine_create (d) is ignored when dynamic libraries are used; see the note above */
1878 #else
1879 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1880 #endif
1881 
1882 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1883 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1884 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1885 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1886 
1887 
1888 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1889 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1890 
1891 /*
1892    PETSc interface to MUMPS
1893 */
1894 #ifdef PETSC_HAVE_MUMPS
1895 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1896 #endif
1897 
1898 /*
1899    PETSc interface to SUPERLU
1900 */
1901 #ifdef PETSC_HAVE_SUPERLU
1902 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1903 #endif
1904 
1905 #if defined(PETSC_HAVE_CUSP)
1906 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1907 extern PetscErrorCode  MatCreateAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1908 #endif
1909 
1910 /*
1911    PETSc interface to FFTW
1912 */
1913 #if defined(PETSC_HAVE_FFTW)
1914 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1915 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1916 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1917 #endif
1918 
1919 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1920 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1921 extern PetscErrorCode MatNestGetISs(Mat,IS[],IS[]);
1922 extern PetscErrorCode MatNestGetLocalISs(Mat,IS[],IS[]);
1923 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1924 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1925 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1926 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1927 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1928 
1929 /*
1930  MatIJ:
1931  An unweighted directed pseudograph
1932  An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
1933  A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
1934  by mapping the indices to the indices connected to them by the (pseudo)graph edges.
1935  */
1936 typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType; /* presumably selects local vs. global numbering for index arguments to the MatIJ routines below -- confirm against MatIJ implementation */
1937 extern  PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
1938 extern  PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
1939 extern  PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
1940 extern  PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
1941 extern  PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
1942 extern  PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
1943 extern  PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
1944 extern  PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
1945 extern  PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
1946 extern  PetscErrorCode MatIJGetSupport(Mat,  PetscInt *, PetscInt **);
1947 extern  PetscErrorCode MatIJGetSupportIS(Mat, IS *);
1948 extern  PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
1949 extern  PetscErrorCode MatIJGetImageIS(Mat, IS *);
1950 extern  PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
1951 extern  PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);
1952 
1953 extern  PetscErrorCode MatIJBinRenumber(Mat, Mat*);
1954 
1955 extern  PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1956 extern  PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1957 extern  PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1958 
1959 PETSC_EXTERN_CXX_END
1960 #endif
1961