xref: /petsc/include/petscmat.h (revision be7c243fa330abc10ff5da07cb1acea58678985d)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;  /* opaque handle: struct _p_Mat is an incomplete type here (not defined in this header) */
19 
20 /*J
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 J*/
29 #define MatType char*
/* Matrix format name strings, passed to MatSetType(). Most formats come in
   "seq" and "mpi" variants plus an unprefixed alias (e.g. "aij" vs "seqaij"/
   "mpiaij") -- presumably the alias selects a variant by communicator size;
   confirm in MatSetType(). */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
/* "shell": matrix defined by user-supplied context and operations -- see MatCreateShell() below */
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
/* FFT-based types -- see MatCreateFFT()/MatCreateSeqCUFFT() below */
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 #define MATIJ              "ij"
85 
86 /*J
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 J*/
97 #define MatSolverPackage char*
/* Solver package name strings: passed to MatGetFactor()/MatGetFactorAvailable()
   (declared below) to select a factorization backend. */
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERMATLAB       "matlab"
108 #define MATSOLVERPETSC        "petsc"
109 #define MATSOLVERPLAPACK      "plapack"
110 #define MATSOLVERBAS          "bas"
111 
112 #define MATSOLVERBSTRM        "bstrm"
113 #define MATSOLVERSBSTRM       "sbstrm"
114 
115 /*E
116     MatFactorType - indicates what type of factorization is requested
117 
118     Level: beginner
119 
120    Any additions/changes here MUST also be made in include/finclude/petscmat.h
121 
122 .seealso: MatSolverPackage, MatGetFactor()
123 E*/
124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
125 extern const char *const MatFactorTypes[];  /* printable names -- presumably indexed by MatFactorType; confirm at the definition site */
126 
127 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
128 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
129 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
130 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
131 
132 /* Logging support */
133 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
134 extern PetscClassId  MAT_CLASSID;
135 extern PetscClassId  MAT_FDCOLORING_CLASSID;
136 extern PetscClassId  MAT_TRANSPOSECOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_NULLSPACE_CLASSID;
139 extern PetscClassId  MATMFFD_CLASSID;
140 
141 /*E
142     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144      that the input matrix is to be replaced with the converted matrix.
145 
146     Level: beginner
147 
148    Any additions/changes here MUST also be made in include/finclude/petscmat.h
149 
150 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151 E*/
152 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* create a new output matrix vs. reuse the one passed in (per E-doc above); MAT_IGNORE_MATRIX presumably skips producing that output -- confirm */
153 
154 /*E
155     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
157 
158     Level: beginner
159 
160 .seealso: MatGetSeqNonzeroStructure()
161 E*/
162 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether extracted submatrices include numerical values or only the nonzero structure (per E-doc above) */
163 
164 extern PetscErrorCode  MatInitializePackage(const char[]);
165 
166 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
167 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
169 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
170 extern PetscErrorCode  MatSetType(Mat,const MatType);
171 extern PetscErrorCode  MatSetFromOptions(Mat);
172 extern PetscErrorCode  MatRegisterAll(const char[]);
173 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
174 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
175 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
176 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
178 
179 /*MC
180    MatRegisterDynamic - Adds a new matrix type
181 
182    Synopsis:
183    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
184 
185    Not Collective
186 
187    Input Parameters:
188 +  name - name of a new user-defined matrix type
189 .  path - path (either absolute or relative) of the library containing this solver
190 .  name_create - name of routine to create method context
191 -  routine_create - routine to create method context
192 
193    Notes:
194    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
195 
196    If dynamic libraries are used, then the fourth input argument (routine_create)
197    is ignored.
198 
199    Sample usage:
200 .vb
201    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
202                "MyMatCreate",MyMatCreate);
203 .ve
204 
205    Then, your solver can be chosen with the procedural interface via
206 $     MatSetType(Mat,"my_mat")
207    or at runtime via the option
208 $     -mat_type my_mat
209 
210    Level: advanced
211 
212    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
213          If your function is not being put into a shared library then use MatRegister() instead
214 
215 .keywords: Mat, register
216 
217 .seealso: MatRegisterAll(), MatRegisterDestroy()
218 
219 M*/
220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* dynamic-library build: the routine_create pointer (d) is ignored and 0 is
   registered instead, as documented in the manual page above */
221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
222 #else
/* static build: the function pointer is registered directly */
223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
224 #endif
225 
226 extern PetscBool  MatRegisterAllCalled;
227 extern PetscFList MatList;
228 extern PetscFList MatColoringList;
229 extern PetscFList MatPartitioningList;
230 
231 /*E
232     MatStructure - Indicates if the matrix has the same nonzero structure
233 
234     Level: beginner
235 
236    Any additions/changes here MUST also be made in include/finclude/petscmat.h
237 
238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
239 E*/
240 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;  /* relation between two matrices' nonzero patterns; SAME_PRECONDITIONER semantics are defined by KSPSetOperators/PCSetOperators (see E-doc), not in this header */
241 
242 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
243 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
252 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
260 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
267 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
268 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
269 
270 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
278 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
293 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
294 
295 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
296 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
304 
305 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
320 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
321 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
322 extern PetscErrorCode  MatXAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt*,PetscInt,const PetscInt*,PetscInt,const PetscInt*,PetscInt,const PetscInt*);
323 
324 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
325 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
328 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
329 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
330 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
331 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
332 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 
340 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
341 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
342 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
343 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
344 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
345 extern PetscErrorCode  MatCompositeMerge(Mat);
346 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
347 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;  /* presumably how a MATCOMPOSITE combines its constituent matrices (sum vs. product) -- confirm in MatCompositeSetType docs */
348 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
349 
350 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
351 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
352 
353 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
354 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
355 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
356 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
357 
358 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
359 
360 extern PetscErrorCode  MatSetUp(Mat);
361 extern PetscErrorCode  MatDestroy(Mat*);
362 
363 extern PetscErrorCode  MatConjugate(Mat);
364 extern PetscErrorCode  MatRealPart(Mat);
365 extern PetscErrorCode  MatImaginaryPart(Mat);
366 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
367 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
368 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
369 
370 /* ------------------------------------------------------------*/
371 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
374 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
375 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
376 
377 /*S
378      MatStencil - Data structure (C struct) for storing information about a single row or
379         column of a matrix as indexed on an associated grid.
380 
381    Fortran usage is different, see MatSetValuesStencil() for details.
382 
383    Level: beginner
384 
385   Concepts: matrix; linear operator
386 
387 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
388 S*/
389 typedef struct {
390   PetscInt k,j,i,c;  /* grid indices (k,j,i) and component c of the node -- NOTE(review): exact meaning/ordering is defined by MatSetValuesStencil(), not visible here; confirm there */
391 } MatStencil;
392 
393 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
394 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
395 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
396 
397 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
398 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
399 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
400 
401 /*E
402     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
403      to continue to add values to it
404 
405     Level: beginner
406 
407 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
408 E*/
409 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;  /* values are pinned explicitly (FINAL==0, FLUSH==1) -- presumably relied upon elsewhere; do not renumber */
410 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
411 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
412 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
413 
414 
415 
416 /*E
417     MatOption - Options that may be set for a matrix and its behavior or storage
418 
419     Level: beginner
420 
421    Any additions/changes here MUST also be made in include/finclude/petscmat.h
422 
423 .seealso: MatSetOption()
424 E*/
/* Options accepted by MatSetOption(); per the E-doc above, any change here must
   be mirrored in include/finclude/petscmat.h. */
425 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
426               MAT_SYMMETRIC,
427               MAT_STRUCTURALLY_SYMMETRIC,
428               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
429               MAT_NEW_NONZERO_LOCATION_ERR,
430               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
431               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
432               MAT_USE_INODES,
433               MAT_HERMITIAN,
434               MAT_SYMMETRY_ETERNAL,
435               MAT_CHECK_COMPRESSED_ROW,
436               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
437               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
438               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
439               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS is presumably a count sentinel -- keep it last */
440 extern const char *MatOptions[];  /* printable option names -- presumably indexed by MatOption; confirm at the definition site */
441 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
441 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
442 extern PetscErrorCode  MatGetType(Mat,const MatType*);
443 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
444 
445 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
446 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
447 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
449 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
450 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
451 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
452 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
453 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
454 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
455 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
456 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
457 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
458 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
459 
460 
461 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
462 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
463 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
464 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
465 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
466 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
467 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
468 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
469 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
470 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
471 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
472 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
473 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
474 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
475 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
476 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
477 
478 /*E
479     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
480   its numerical values copied over or just its nonzero structure.
481 
482     Level: beginner
483 
484    Any additions/changes here MUST also be made in include/finclude/petscmat.h
485 
486 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
487 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
488 $                               have several matrices with the same nonzero pattern.
489 
490 .seealso: MatDuplicate()
491 E*/
492 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* MAT_SHARE_NONZERO_PATTERN shares the i and j arrays with the original and also implies MAT_DO_NOT_COPY_VALUES (per E-doc above) */
493 
494 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
495 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
496 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
497 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
498 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
499 
500 
501 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
502 extern PetscErrorCode  MatView(Mat,PetscViewer);
503 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
504 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
505 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
506 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
507 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
508 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
509 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
510 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
511 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
512 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
513 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
514 
515 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
516 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
517 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
518 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
519 
520 /*S
521      MatInfo - Context of matrix information, used with MatGetInfo()
522 
523    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
524 
525    Level: intermediate
526 
527   Concepts: matrix^nonzero information
528 
529 .seealso:  MatGetInfo(), MatInfoType
530 S*/
/* Every field is PetscLogDouble so that, per the note above, Fortran can view this
   struct as a plain double precision array of length MAT_INFO_SIZE */
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
540 
541 /*E
542     MatInfoType - Indicates if you want information about the local part of the matrix,
543      the entire parallel matrix or the maximum over all the local parts.
544 
545     Level: beginner
546 
547    Any additions/changes here MUST also be made in include/finclude/petscmat.h
548 
549 .seealso: MatGetInfo(), MatInfo
550 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
/* Row-wise reductions; the PetscInt[] argument, when non-null, receives column indices */
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
/* Explicit transposition and row/column permutation */
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
/* Equality tests between two matrices */
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

/* Norms, zeroing of entries, and zeroing of whole rows (and rows+columns), with
   optional diagonal value and right-hand-side fixup vectors */
extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode  MatZeroEntries(Mat);
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
583 
/* Hooks for solving with a scaled form of the system */
extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

/* Global/local dimensions and the parallel row/column ownership layout */
extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);

/* Extraction of submatrices and of the sequential nonzero structure */
extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Merging of sequential matrices into a parallel matrix, and access to the
   local (per-process) pieces of MPIAIJ matrices */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMergeSymbolic(MPI_Comm,Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMergeNumeric(MPI_Comm,Mat,PetscInt,Mat);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*);
/* The column-index container changes with the PETSC_USE_CTABLE build option */
#if defined (PETSC_USE_CTABLE)
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
621 
/* Matrix-matrix products; each has a symbolic (structure) and numeric (values) phase,
   the PetscReal argument being the predicted fill ratio */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
extern PetscErrorCode  MatRARt(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatRARtSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatRARtNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatTransposeMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
/* NOTE(review): "Transposet" in the next two names looks like a typo for "Transpose",
   but the declarations must match the implementation's names — verify before renaming */
extern PetscErrorCode  MatTransposetMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatTransposetMatMultNumeric(Mat,Mat,Mat);
extern PetscErrorCode  MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatTransposeMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatTransposeMultNumeric(Mat,Mat,Mat);

/* AXPY-style updates and uniform scaling/shifting */
extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local-to-global index mappings and the local-numbering variants of the
   set-values and zero-rows operations */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

/* Control/query of the stash that buffers off-process values during assembly */
extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

/* Grid-transfer helpers and compatible-vector creation */
extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
668 
669 /*MC
670    MatSetValue - Set a single entry into a matrix.
671 
672    Not collective
673 
674    Input Parameters:
675 +  m - the matrix
676 .  row - the row location of the entry
677 .  col - the column location of the entry
678 .  value - the value to insert
679 -  mode - either INSERT_VALUES or ADD_VALUES
680 
681    Notes:
682    For efficiency one should use MatSetValues() and set several or many
683    values simultaneously if possible.
684 
685    Level: beginner
686 
687 .seealso: MatSetValues(), MatSetValueLocal()
688 M*/
689 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
690 
691 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
692 
693 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
694 
695 /*MC
696    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
697        row in a matrix providing the data that one can use to correctly preallocate the matrix.
698 
699    Synopsis:
700    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
701 
702    Collective on MPI_Comm
703 
704    Input Parameters:
705 +  comm - the communicator that will share the eventually allocated matrix
706 .  nrows - the number of LOCAL rows in the matrix
707 -  ncols - the number of LOCAL columns in the matrix
708 
709    Output Parameters:
710 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
712 
713 
714    Level: intermediate
715 
716    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
718 
719    Do not malloc or free dnz and onz, that is handled internally by these routines
720 
721    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
722 
723    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
724 
725   Concepts: preallocation^Matrix
726 
727 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
728           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
729 M*/
/* NOTE(review): expands to "0;" followed by an opening brace; the matching close brace
   is supplied by MatPreallocateFinalize().  Declares _4_ierr and the counters
   __nrows, __ctmp, __rstart, __start, __end that the other MatPreallocate* macros
   rely on.  The two MPI_Scan calls compute this process's first global column
   (__start) and first global row (__rstart). */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
738 
739 /*MC
740    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
741        row in a matrix providing the data that one can use to correctly preallocate the matrix.
742 
743    Synopsis:
744    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
745 
746    Collective on MPI_Comm
747 
748    Input Parameters:
749 +  comm - the communicator that will share the eventually allocated matrix
750 .  nrows - the number of LOCAL rows in the matrix
751 -  ncols - the number of LOCAL columns in the matrix
752 
753    Output Parameters:
754 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
756 
757 
758    Level: intermediate
759 
760    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
762 
763    Do not malloc or free dnz and onz, that is handled internally by these routines
764 
765    This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize().
766 
767   Concepts: preallocation^Matrix
768 
769 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
770           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
771 M*/
/* NOTE(review): like MatPreallocateInitialize() but does not declare/compute __start,
   since MatPreallocateSymmetricSet() only compares column indices against __end and
   the row itself.  The scope opened here is closed by MatPreallocateFinalize(). */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
780 
781 /*MC
782    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
783        inserted using a local number of the rows and columns
784 
785    Synopsis:
786    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
787 
788    Not Collective
789 
790    Input Parameters:
791 +  map - the row mapping from local numbering to global numbering
792 .  nrows - the number of rows indicated
793 .  rows - the indices of the rows
794 .  cmap - the column mapping from local to global numbering
795 .  ncols - the number of columns in the matrix
796 .  cols - the columns indicated
797 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
799 
800 
801    Level: intermediate
802 
803    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
805 
806    Do not malloc or free dnz and onz, that is handled internally by these routines
807 
808   Concepts: preallocation^Matrix
809 
810 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
811           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
812 M*/
/* NOTE(review): maps rows[] and cols[] from local to global numbering IN PLACE (the
   caller's arrays are overwritten), then counts each row through MatPreallocateSet().
   Relies on _4_ierr and the __* variables declared by MatPreallocateInitialize(). */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
822 
823 /*MC
824    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
825        inserted using a local number of the rows and columns
826 
827    Synopsis:
828    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
829 
830    Not Collective
831 
832    Input Parameters:
833 +  map - the mapping between local numbering and global numbering
834 .  nrows - the number of rows indicated
835 .  rows - the indices of the rows
836 .  ncols - the number of columns in the matrix
837 .  cols - the columns indicated
838 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
840 
841 
842    Level: intermediate
843 
844    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
846 
847    Do not malloc or free dnz and onz that is handled internally by these routines
848 
849   Concepts: preallocation^Matrix
850 
851 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
852           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
853 M*/
/* NOTE(review): same in-place local-to-global translation as MatPreallocateSetLocal(),
   but a single mapping is used for both rows and columns and the counting goes through
   MatPreallocateSymmetricSet().  Must appear inside a MatPreallocateSymmetricInitialize()
   / MatPreallocateFinalize() pair, which provide _4_ierr and the __* variables. */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
863 
864 /*MC
865    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns
867 
868    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
870 
871    Not Collective
872 
873    Input Parameters:
874 +  row - the row
875 .  ncols - the number of columns in the matrix
876 -  cols - the columns indicated
877 
878    Output Parameters:
879 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
881 
882 
883    Level: intermediate
884 
885    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
887 
888    Do not malloc or free dnz and onz that is handled internally by these routines
889 
890    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
891 
892   Concepts: preallocation^Matrix
893 
894 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
895           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
896 M*/
/* NOTE(review): classifies each of cols[0..nc-1] for global row "row" as a
   diagonal-block entry (dnz) when __start <= col < __end, otherwise off-diagonal (onz);
   errors if row is outside this process's ownership range.  Uses __rstart, __start,
   __end and __nrows declared by MatPreallocateInitialize(). */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
906 
907 /*MC
908    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
909        inserted using a local number of the rows and columns
910 
911    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
913 
914    Not Collective
915 
916    Input Parameters:
+  row - the global row being preallocated for
.  ncols - the number of columns indicated
.  cols - the (global) column indices
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
923 
924 
925    Level: intermediate
926 
927    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
929 
930    Do not malloc or free dnz and onz that is handled internally by these routines
931 
932    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
933 
934   Concepts: preallocation^Matrix
935 
936 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
937           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
938 M*/
/* NOTE(review): symmetric variant — only entries in the upper triangle
   (cols[__i] >= row) are counted for the diagonal block, and anything at or past
   __end is off-diagonal; entries below the diagonal are intentionally ignored.
   Performs no row-range check, unlike MatPreallocateSet(). */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
946 
947 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
949 
950    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
952 
953    Not Collective
954 
955    Input Parameters:
956 .  A - matrix
957 .  row - row where values exist (must be local to this process)
958 .  ncols - number of columns
959 .  cols - columns with nonzeros
960 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
962 
963 
964    Level: intermediate
965 
966    Notes:
967     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details.
968 
969    Do not malloc or free dnz and onz that is handled internally by these routines
970 
971    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
972 
973   Concepts: preallocation^Matrix
974 
975 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
976           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
977 M*/
/* NOTE(review): unlike the other MatPreallocate* macros this one uses the caller's
   "ierr" variable, not _4_ierr — an "ierr" must be in scope.  If A already exists the
   locations are inserted directly with MatSetValues(); otherwise they are counted
   into dnz/onz via MatPreallocateSet(). */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
979 
980 
981 /*MC
982    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
983        row in a matrix providing the data that one can use to correctly preallocate the matrix.
984 
985    Synopsis:
986    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
987 
988    Collective on MPI_Comm
989 
990    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
993 
994 
995    Level: intermediate
996 
997    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
999 
1000    Do not malloc or free dnz and onz that is handled internally by these routines
1001 
1002    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
1003 
1004   Concepts: preallocation^Matrix
1005 
1006 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1007           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1008 M*/
/* Frees the dnz/onz arrays and closes the scope opened by MatPreallocate[Symmetric]Initialize() */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1010 
1011 
1012 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void *);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

/* Inode (identical-node) compression support */
extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Build sequential matrices directly from user-provided CSR-style arrays */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool);

#define MAT_SKIP_ALLOCATION -4

/* Preallocation routines for the sequential formats (polymorphic overloads
   supply default arguments) */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

/* Preallocation routines for the parallel formats */
extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
extern PetscErrorCode  MatMPIAdjCreateNonemptySubcommMat(Mat,Mat*);

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

/* Save and later restore the numerical values of a matrix (structure unchanged) */
extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1061 /*
1062   These routines are not usually accessed directly, rather solving is
1063   done through the KSP and PC interfaces.
1064 */
1065 
1066 /*J
1067     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1068        with an optional dynamic library name, for example
1069        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1070 
1071    Level: beginner
1072 
1073    Cannot use const because the PC objects manipulate the string
1074 
1075 .seealso: MatGetOrdering()
1076 J*/
/* Built-in matrix ordering names; see the MatOrderingType manual page above */
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Compute row/column orderings and register new ordering routines */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1089 
1090 /*MC
1091    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1092 
1093    Synopsis:
1094    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1095 
1096    Not Collective
1097 
1098    Input Parameters:
1099 +  sname - name of ordering (for example MATORDERINGND)
1100 .  path - location of library where creation routine is
1101 .  name - name of function that creates the ordering type,a string
1102 -  function - function pointer that creates the ordering
1103 
1104    Level: developer
1105 
1106    If dynamic libraries are used, then the fourth input argument (function)
1107    is ignored.
1108 
1109    Sample usage:
1110 .vb
1111    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1112                "MyOrder",MyOrder);
1113 .ve
1114 
1115    Then, your partitioner can be chosen with the procedural interface via
1116 $     MatOrderingSetType(part,"my_order")
1117    or at runtime via the option
1118 $     -pc_factor_mat_ordering_type my_order
1119 
1120    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1121 
1122 .keywords: matrix, ordering, register
1123 
1124 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1125 M*/
1126 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1127 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
1128 #else
1129 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1130 #endif
1131 
1132 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1133 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1134 extern PetscBool  MatOrderingRegisterAllCalled;
1135 extern PetscFList MatOrderingList;
1136 
1137 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1138 
1139 /*S
1140     MatFactorShiftType - Numeric Shift.
1141 
1142    Level: beginner
1143 
1144 S*/
1145 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;  /* shift strategy used during factorization to prevent zero pivots (stored in MatFactorInfo.shifttype) */
1146 extern const char *MatFactorShiftTypes[];  /* NOTE(review): presumably printable names parallel to the MatFactorShiftType values -- confirm */
1147 
1148 /*S
1149    MatFactorInfo - Data passed into the matrix factorization routines
1150 
1151    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1152 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1153 
1154    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1155 
1156       You can use MatFactorInfoInitialize() to set default values.
1157 
1158    Level: developer
1159 
1160 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1161           MatFactorInfoInitialize()
1162 
1163 S*/
1164 typedef struct {
1165   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1166   PetscReal     usedt;          /* if nonzero, use the drop tolerance dt below -- NOTE(review): flag stored as PetscReal so the struct maps onto a Fortran double precision array (see note above); confirm */
1167   PetscReal     dt;             /* drop tolerance */
1168   PetscReal     dtcol;          /* tolerance for pivoting */
1169   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1170   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1171   PetscReal     levels;         /* ICC/ILU(levels) */
1172   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1173                                    factorization may be faster if do not pivot */
1174   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1175   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
1176   PetscReal     shiftamount;     /* how large the shift is */
1177 } MatFactorInfo;
1178 
1179 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1180 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1181 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1182 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1183 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1184 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1185 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1186 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1187 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1188 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1189 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1190 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1191 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1192 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1193 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1194 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1195 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1196 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1197 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1198 
1199 extern PetscErrorCode  MatSetUnfactored(Mat);
1200 
1201 /*E
1202     MatSORType - What type of (S)SOR to perform
1203 
1204     Level: beginner
1205 
1206    May be bitwise ORd together
1207 
1208    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1209 
1210    MatSORType may be bitwise ORd together, so do not change the numbers
1211 
1212 .seealso: MatSOR()
1213 E*/
1214 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,      /* SYMMETRIC == FORWARD | BACKWARD */
1215               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1216               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,             /* LOCAL_SYMMETRIC == LOCAL_FORWARD | LOCAL_BACKWARD */
1217               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* values are distinct bits so they may be ORd together */
1218 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1219 
1220 /*
1221     These routines are for efficiently computing Jacobians via finite differences.
1222 */
1223 
1224 /*J
1225     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1226        with an optional dynamic library name, for example
1227        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1228 
1229    Level: beginner
1230 
1231 .seealso: MatGetColoring()
1232 J*/
1233 #define MatColoringType char*
1234 #define MATCOLORINGNATURAL "natural"
1235 #define MATCOLORINGSL      "sl"
1236 #define MATCOLORINGLF      "lf"
1237 #define MATCOLORINGID      "id"
1238 
1239 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1240 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1241 
1242 /*MC
1243    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1244                                matrix package.
1245 
1246    Synopsis:
1247    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1248 
1249    Not Collective
1250 
1251    Input Parameters:
1252 +  sname - name of Coloring (for example MATCOLORINGSL)
1253 .  path - location of library where creation routine is
1254 .  name - name of function that creates the Coloring type, a string
1255 -  function - function pointer that creates the coloring
1256 
1257    Level: developer
1258 
1259    If dynamic libraries are used, then the fourth input argument (function)
1260    is ignored.
1261 
1262    Sample usage:
1263 .vb
1264    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1265                "MyColor",MyColor);
1266 .ve
1267 
1268    Then, your partitioner can be chosen with the procedural interface via
1269 $     MatColoringSetType(part,"my_color")
1270    or at runtime via the option
1271 $     -mat_coloring_type my_color
1272 
1273    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1274 
1275 .keywords: matrix, Coloring, register
1276 
1277 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1278 M*/
1279 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1280 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1281 #else
1282 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1283 #endif
1284 
1285 extern PetscBool  MatColoringRegisterAllCalled;
1286 
1287 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1288 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1289 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1290 
1291 /*S
1292      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1293         and coloring
1294 
1295    Level: beginner
1296 
1297   Concepts: coloring, sparse Jacobian, finite differences
1298 
1299 .seealso:  MatFDColoringCreate()
1300 S*/
1301 typedef struct _p_MatFDColoring* MatFDColoring;
1302 
1303 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1304 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1305 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1306 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1307 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1308 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1309 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1310 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1311 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1312 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1313 
1314 /*S
1315      MatTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring
1316 
1317    Level: beginner
1318 
1319   Concepts: coloring, sparse matrix product
1320 
1321 .seealso:  MatTransposeColoringCreate()
1322 S*/
1323 typedef struct _p_MatTransposeColoring* MatTransposeColoring;
1324 
1325 extern PetscErrorCode MatTransposeColoringCreate(Mat,ISColoring,MatTransposeColoring *);
1326 extern PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring,Mat,Mat);
1327 extern PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring,Mat,Mat);
1328 extern PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*);
1329 
1330 /*
1331     These routines are for partitioning matrices: currently used only
1332   for adjacency matrix, MatCreateMPIAdj().
1333 */
1334 
1335 /*S
1336      MatPartitioning - Object for managing the partitioning of a matrix or graph
1337 
1338    Level: beginner
1339 
1340   Concepts: partitioning
1341 
1342 .seealso:  MatPartitioningCreate(), MatPartitioningType
1343 S*/
1344 typedef struct _p_MatPartitioning* MatPartitioning;
1345 
1346 /*J
1347     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1348        with an optional dynamic library name, for example
1349        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1350 
1351    Level: beginner
1352 
1353 .seealso: MatPartitioningCreate(), MatPartitioning
1354 J*/
1355 #define MatPartitioningType char*
1356 #define MATPARTITIONINGCURRENT  "current"
1357 #define MATPARTITIONINGSQUARE   "square"
1358 #define MATPARTITIONINGPARMETIS "parmetis"
1359 #define MATPARTITIONINGCHACO    "chaco"
1360 #define MATPARTITIONINGPARTY    "party"
1361 #define MATPARTITIONINGPTSCOTCH "ptscotch"
1362 
1363 
1364 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1365 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1366 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1367 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1368 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1369 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1370 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1371 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1372 
1373 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1374 
1375 /*MC
1376    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1377    matrix package.
1378 
1379    Synopsis:
1380    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1381 
1382    Not Collective
1383 
1384    Input Parameters:
1385 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1386 .  path - location of library where creation routine is
1387 .  name - name of function that creates the partitioning type, a string
1388 -  function - function pointer that creates the partitioning type
1389 
1390    Level: developer
1391 
1392    If dynamic libraries are used, then the fourth input argument (function)
1393    is ignored.
1394 
1395    Sample usage:
1396 .vb
1397    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1398                "MyPartCreate",MyPartCreate);
1399 .ve
1400 
1401    Then, your partitioner can be chosen with the procedural interface via
1402 $     MatPartitioningSetType(part,"my_part")
1403    or at runtime via the option
1404 $     -mat_partitioning_type my_part
1405 
1406    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1407 
1408 .keywords: matrix, partitioning, register
1409 
1410 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1411 M*/
1412 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1413 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1414 #else
1415 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1416 #endif
1417 
1418 extern PetscBool  MatPartitioningRegisterAllCalled;
1419 
1420 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1421 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1422 
1423 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1424 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1425 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1426 
1427 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1428 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1429 
1430 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;  /* Chaco global partitioning method (see MatPartitioningChacoSetGlobal()) */
1431 extern const char *MPChacoGlobalTypes[];  /* NOTE(review): presumably printable names for MPChacoGlobalType -- confirm */
1432 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;  /* Chaco local refinement method (see MatPartitioningChacoSetLocal()) */
1433 extern const char *MPChacoLocalTypes[];
1434 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;  /* Chaco eigensolver choice (see MatPartitioningChacoSetEigenSolver()) */
1435 extern const char *MPChacoEigenTypes[];
1436 
1437 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
1438 extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
1439 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
1440 extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
1441 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1442 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1443 extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
1444 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
1445 extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
1446 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
1447 extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1448 
1449 #define MP_PARTY_OPT "opt"
1450 #define MP_PARTY_LIN "lin"
1451 #define MP_PARTY_SCA "sca"
1452 #define MP_PARTY_RAN "ran"
1453 #define MP_PARTY_GBF "gbf"
1454 #define MP_PARTY_GCF "gcf"
1455 #define MP_PARTY_BUB "bub"
1456 #define MP_PARTY_DEF "def"
1457 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
1458 #define MP_PARTY_HELPFUL_SETS "hs"
1459 #define MP_PARTY_KERNIGHAN_LIN "kl"
1460 #define MP_PARTY_NONE "no"
1461 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
1462 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1463 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
1464 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1465 
1466 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;  /* PT-Scotch strategy flag (see MatPartitioningPTScotchSetStrategy()) */
1467 extern const char *MPPTScotchStrategyTypes[];
1468 
1469 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
1470 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
1471 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
1472 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);
1473 
1474 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1475 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1476 
1477 /*
1478     If you add entries here you must also add them to finclude/petscmat.h
1479 */
1480 typedef enum { MATOP_SET_VALUES=0,
1481                MATOP_GET_ROW=1,
1482                MATOP_RESTORE_ROW=2,
1483                MATOP_MULT=3,
1484                MATOP_MULT_ADD=4,
1485                MATOP_MULT_TRANSPOSE=5,
1486                MATOP_MULT_TRANSPOSE_ADD=6,
1487                MATOP_SOLVE=7,
1488                MATOP_SOLVE_ADD=8,
1489                MATOP_SOLVE_TRANSPOSE=9,
1490                MATOP_SOLVE_TRANSPOSE_ADD=10,
1491                MATOP_LUFACTOR=11,
1492                MATOP_CHOLESKYFACTOR=12,
1493                MATOP_SOR=13,
1494                MATOP_TRANSPOSE=14,
1495                MATOP_GETINFO=15,
1496                MATOP_EQUAL=16,
1497                MATOP_GET_DIAGONAL=17,
1498                MATOP_DIAGONAL_SCALE=18,
1499                MATOP_NORM=19,
1500                MATOP_ASSEMBLY_BEGIN=20,
1501                MATOP_ASSEMBLY_END=21,
1502                MATOP_SET_OPTION=22,
1503                MATOP_ZERO_ENTRIES=23,
1504                MATOP_ZERO_ROWS=24,
1505                MATOP_LUFACTOR_SYMBOLIC=25,
1506                MATOP_LUFACTOR_NUMERIC=26,
1507                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1508                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1509                MATOP_SETUP_PREALLOCATION=29,
1510                MATOP_ILUFACTOR_SYMBOLIC=30,
1511                MATOP_ICCFACTOR_SYMBOLIC=31,
1512                MATOP_GET_ARRAY=32,
1513                MATOP_RESTORE_ARRAY=33,
1514                MATOP_DUPLICATE=34,
1515                MATOP_FORWARD_SOLVE=35,
1516                MATOP_BACKWARD_SOLVE=36,
1517                MATOP_ILUFACTOR=37,
1518                MATOP_ICCFACTOR=38,
1519                MATOP_AXPY=39,
1520                MATOP_GET_SUBMATRICES=40,
1521                MATOP_INCREASE_OVERLAP=41,
1522                MATOP_GET_VALUES=42,
1523                MATOP_COPY=43,
1524                MATOP_GET_ROW_MAX=44,
1525                MATOP_SCALE=45,
1526                MATOP_SHIFT=46,
1527                MATOP_DIAGONAL_SET=47,
1528                MATOP_ILUDT_FACTOR=48,
1529                MATOP_SET_BLOCK_SIZE=49,
1530                MATOP_GET_ROW_IJ=50,
1531                MATOP_RESTORE_ROW_IJ=51,
1532                MATOP_GET_COLUMN_IJ=52,
1533                MATOP_RESTORE_COLUMN_IJ=53,
1534                MATOP_FDCOLORING_CREATE=54,
1535                MATOP_COLORING_PATCH=55,
1536                MATOP_SET_UNFACTORED=56,
1537                MATOP_PERMUTE=57,
1538                MATOP_SET_VALUES_BLOCKED=58,
1539                MATOP_GET_SUBMATRIX=59,
1540                MATOP_DESTROY=60,
1541                MATOP_VIEW=61,
1542                MATOP_CONVERT_FROM=62,
1543                MATOP_USE_SCALED_FORM=63,
1544                MATOP_SCALE_SYSTEM=64,
1545                MATOP_UNSCALE_SYSTEM=65,
1546                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1547                MATOP_SET_VALUES_LOCAL=67,
1548                MATOP_ZERO_ROWS_LOCAL=68,
1549                MATOP_GET_ROW_MAX_ABS=69,
1550                MATOP_GET_ROW_MIN_ABS=70,
1551                MATOP_CONVERT=71,
1552                MATOP_SET_COLORING=72,
1553                MATOP_SET_VALUES_ADIC=73,
1554                MATOP_SET_VALUES_ADIFOR=74,
1555                MATOP_FD_COLORING_APPLY=75,
1556                MATOP_SET_FROM_OPTIONS=76,
1557                MATOP_MULT_CON=77,
1558                MATOP_MULT_TRANSPOSE_CON=78,
1559                MATOP_PERMUTE_SPARSIFY=79,
1560                MATOP_MULT_MULTIPLE=80,
1561                MATOP_SOLVE_MULTIPLE=81,
1562                MATOP_GET_INERTIA=82,
1563                MATOP_LOAD=83,
1564                MATOP_IS_SYMMETRIC=84,
1565                MATOP_IS_HERMITIAN=85,
1566                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1567                MATOP_DUMMY=87,
1568                MATOP_GET_VECS=88,
1569                MATOP_MAT_MULT=89,
1570                MATOP_MAT_MULT_SYMBOLIC=90,
1571                MATOP_MAT_MULT_NUMERIC=91,
1572                MATOP_PTAP=92,
1573                MATOP_PTAP_SYMBOLIC=93,
1574                MATOP_PTAP_NUMERIC=94,
1575                MATOP_MAT_MULTTRANSPOSE=95,
1576                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1577                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1578                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1579                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1580                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1581                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1582                MATOP_CONJUGATE=102,
1583                MATOP_SET_SIZES=103,
1584                MATOP_SET_VALUES_ROW=104,
1585                MATOP_REAL_PART=105,
1586                MATOP_IMAG_PART=106,
1587                MATOP_GET_ROW_UTRIANGULAR=107,
1588                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1589                MATOP_MATSOLVE=109,
1590                MATOP_GET_REDUNDANTMATRIX=110,
1591                MATOP_GET_ROW_MIN=111,
1592                MATOP_GET_COLUMN_VEC=112,
1593                MATOP_MISSING_DIAGONAL=113,
1594                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1595                MATOP_CREATE=115,
1596                MATOP_GET_GHOSTS=116,
1597                MATOP_GET_LOCALSUBMATRIX=117,
1598                MATOP_RESTORE_LOCALSUBMATRIX=118,
1599                MATOP_MULT_DIAGONAL_BLOCK=119,
1600                MATOP_HERMITIANTRANSPOSE=120,
1601                MATOP_MULTHERMITIANTRANSPOSE=121,
1602                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1603                MATOP_GETMULTIPROCBLOCK=123,
1604                MATOP_GETCOLUMNNORMS=125,
1605 	       MATOP_GET_SUBMATRICES_PARALLEL=128,
1606                MATOP_SET_VALUES_BATCH=129,
1607                MATOP_TRANSPOSEMATMULT=130,
1608                MATOP_TRANSPOSEMATMULT_SYMBOLIC=131,
1609                MATOP_TRANSPOSEMATMULT_NUMERIC=132,
1610                MATOP_TRANSPOSECOLORING_CREATE=133,
1611                MATOP_TRANSCOLORING_APPLY_SPTODEN=134,
1612                MATOP_TRANSCOLORING_APPLY_DENTOSP=135,
1613                MATOP_RARt=136,
1614                MATOP_RARt_SYMBOLIC=137,
1615                MATOP_RARt_NUMERIC=138
1616              } MatOperation;
1617 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1618 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1619 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1620 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1621 
1622 /*
1623    Codes for matrices stored on disk. By default they are
1624    stored in a universal format. By changing the format with
1625    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1626    be stored in a way natural for the matrix, for example dense matrices
1627    would be stored as dense. Matrices stored this way may only be
1628    read into matrices of the same type.
1629 */
1630 #define MATRIX_BINARY_FORMAT_DENSE -1
1631 
1632 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1633 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1634 
1635 /*S
1636      MatNullSpace - Object that removes a null space from a vector, i.e.
1637          orthogonalizes the vector to a subspace
1638 
1639    Level: advanced
1640 
1641   Concepts: matrix; linear operator, null space
1642 
1643   Users manual sections:
1644 .   sec_singular
1645 
1646 .seealso:  MatNullSpaceCreate()
1647 S*/
1648 typedef struct _p_MatNullSpace* MatNullSpace;
1649 
1650 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1651 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1652 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1653 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1654 extern PetscErrorCode  MatGetNullSpace(Mat, MatNullSpace *);
1655 extern PetscErrorCode  MatSetNullSpace(Mat,MatNullSpace);
1656 extern PetscErrorCode  MatSetNearNullSpace(Mat,MatNullSpace);
1657 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1658 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1659 
1660 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1661 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1662 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1663 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1664 
1665 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1666 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1667 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1668 
1669 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1670 
1671 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1672 
1673 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1674 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1675 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1676 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1677 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1678 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1679 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1680 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1681 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1682 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1683 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1684 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1685 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1686 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1687 
1688 /*S
1689     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1690               Jacobian vector products
1691 
1692     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1693 
1694            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1695 
1696     Level: developer
1697 
1698 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1699 S*/
1700 typedef struct _p_MatMFFD* MatMFFD;
1701 
1702 /*J
1703     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1704 
1705    Level: beginner
1706 
1707 .seealso: MatMFFDSetType(), MatMFFDRegister()
1708 J*/
1709 #define MatMFFDType char*
1710 #define MATMFFD_DS  "ds"
1711 #define MATMFFD_WP  "wp"
1712 
1713 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1714 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1715 
1716 /*MC
1717    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1718 
1719    Synopsis:
1720    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1721 
1722    Not Collective
1723 
1724    Input Parameters:
1725 +  name_solver - name of a new user-defined compute-h module
1726 .  path - path (either absolute or relative) the library containing this solver
1727 .  name_create - name of routine to create method context
1728 -  routine_create - routine to create method context
1729 
1730    Level: developer
1731 
1732    Notes:
1733    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1734 
1735    If dynamic libraries are used, then the fourth input argument (routine_create)
1736    is ignored.
1737 
1738    Sample usage:
1739 .vb
1740    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1741                "MyHCreate",MyHCreate);
1742 .ve
1743 
1744    Then, your solver can be chosen with the procedural interface via
1745 $     MatMFFDSetType(mfctx,"my_h")
1746    or at runtime via the option
1747 $     -snes_mf_type my_h
1748 
1749 .keywords: MatMFFD, register
1750 
1751 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1752 M*/
1753 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1754 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1755 #else
1756 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1757 #endif
1758 
1759 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1760 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1761 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1762 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1763 
1764 
1765 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1766 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1767 
1768 /*
1769    PETSc interface to MUMPS
1770 */
1771 #ifdef PETSC_HAVE_MUMPS
1772 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1773 #endif
1774 
1775 /*
1776    PETSc interface to SUPERLU
1777 */
1778 #ifdef PETSC_HAVE_SUPERLU
1779 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1780 #endif
1781 
1782 #if defined(PETSC_HAVE_CUSP)
1783 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1784 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1785 #endif
1786 
1787 /*
1788    PETSc interface to FFTW
1789 */
1790 #if defined(PETSC_HAVE_FFTW)
1791 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1792 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1793 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1794 #endif
1795 
1796 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1797 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1798 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1799 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1800 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1801 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1802 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1803 
1804 /*
1805  MatIJ:
1806  An unweighted directed pseudograph
1807  An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
1808  A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
1809  by mapping the indices to the indices connected to them by the (pseudo)graph ed
1810  */
1811 typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType;  /* whether MatIJ indices are interpreted in the local or the global numbering -- NOTE(review): confirm against the MatIJ implementation */
1812 extern  PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
1813 extern  PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
1814 extern  PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
1815 extern  PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
1816 extern  PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
1817 extern  PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
1818 extern  PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
1819 extern  PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
1820 extern  PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
1821 extern  PetscErrorCode MatIJGetSupport(Mat,  PetscInt *, PetscInt **);
1822 extern  PetscErrorCode MatIJGetSupportIS(Mat, IS *);
1823 extern  PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
1824 extern  PetscErrorCode MatIJGetImageIS(Mat, IS *);
1825 extern  PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
1826 extern  PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);
1827 
1828 extern  PetscErrorCode MatIJBinRenumber(Mat, Mat*);
1829 
1830 extern  PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1831 extern  PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1832 extern  PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1833 
1834 PETSC_EXTERN_CXX_END
1835 #endif
1836