xref: /petsc/include/petscmat.h (revision 843c4018fc3d006c2d8dd5725ecebc3683f00fb0)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*J
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 J*/
29 #define MatType char*
/* Built-in matrix type name strings.  Naming convention visible below: a
   "seq" prefix denotes the uniprocessor implementation, "mpi" the parallel
   one, and the unprefixed base name (e.g. MATAIJ) selects between the two --
   presumably based on the communicator size; confirm in MatSetType(). */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
/* Types below this point are generally not plain storage formats: e.g.
   "shell" is defined by user callbacks (see MatCreateShell()), "composite"
   combines other Mats, "transpose" applies another Mat transposed. */
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 #define MATIJ              "ij"
85 
86 /*J
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 J*/
97 #define MatSolverPackage char*
/* Registered names of direct-solver packages (built-in "petsc" plus external
   libraries) selectable through MatGetFactor(). */
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERMATLAB       "matlab"
108 #define MATSOLVERPETSC        "petsc"
109 #define MATSOLVERPLAPACK      "plapack"
110 #define MATSOLVERBAS          "bas"
111 
112 #define MATSOLVERBSTRM        "bstrm"
113 #define MATSOLVERSBSTRM       "sbstrm"
114 
115 /*E
116     MatFactorType - indicates what type of factorization is requested
117 
118     Level: beginner
119 
120    Any additions/changes here MUST also be made in include/finclude/petscmat.h
121 
122 .seealso: MatSolverPackage, MatGetFactor()
123 E*/
124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
/* Printable names for MatFactorType -- presumably indexed by the enum value;
   verify in the defining .c file. */
125 extern const char *const MatFactorTypes[];
126 
/* Obtain (or test for availability of) a factored-matrix object of the given
   factor type from the named solver package (see MatSolverPackage above), and
   query the package/factor type of an existing matrix. */
127 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
128 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
129 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
130 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
131 
132 /* Logging support */
133 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
134 extern PetscClassId  MAT_CLASSID;
135 extern PetscClassId  MAT_FDCOLORING_CLASSID;
136 extern PetscClassId  MAT_MULTTRANSPOSECOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_NULLSPACE_CLASSID;
139 extern PetscClassId  MATMFFD_CLASSID;
140 
141 /*E
142     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144      that the input matrix is to be replaced with the converted matrix.
145 
146     Level: beginner
147 
148    Any additions/changes here MUST also be made in include/finclude/petscmat.h
149 
150 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151 E*/
/* MAT_INITIAL_MATRIX: create a new Mat; MAT_REUSE_MATRIX: overwrite the Mat
   passed in; MAT_IGNORE_MATRIX: skip producing that output entirely --
   confirm exact semantics on the MatGetSubMatrices() manual page. */
152 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
153 
154 /*E
155     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
157 
158     Level: beginner
159 
160 .seealso: MatGetSeqNonzeroStructure()
161 E*/
/* Whether an extracted nonzero structure also carries the numerical values
   (MAT_GET_VALUES) or only the pattern (MAT_DO_NOT_GET_VALUES). */
162 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
163 
164 extern PetscErrorCode  MatInitializePackage(const char[]);
165 
166 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
167 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
169 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
170 extern PetscErrorCode  MatSetType(Mat,const MatType);
171 extern PetscErrorCode  MatSetFromOptions(Mat);
172 extern PetscErrorCode  MatSetUpPreallocation(Mat);
173 extern PetscErrorCode  MatRegisterAll(const char[]);
174 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
175 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
176 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
179 
180 /*MC
181    MatRegisterDynamic - Adds a new matrix type
182 
183    Synopsis:
184    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
185 
186    Not Collective
187 
188    Input Parameters:
189 +  name - name of a new user-defined matrix type
190 .  path - path (either absolute or relative) the library containing this solver
191 .  name_create - name of routine to create method context
192 -  routine_create - routine to create method context
193 
194    Notes:
195    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
196 
197    If dynamic libraries are used, then the fourth input argument (routine_create)
198    is ignored.
199 
200    Sample usage:
201 .vb
202    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
203                "MyMatCreate",MyMatCreate);
204 .ve
205 
206    Then, your solver can be chosen with the procedural interface via
207 $     MatSetType(Mat,"my_mat")
208    or at runtime via the option
209 $     -mat_type my_mat
210 
211    Level: advanced
212 
213    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214          If your function is not being put into a shared library then use MatRegister() instead
215 
216 .keywords: Mat, register
217 
218 .seealso: MatRegisterAll(), MatRegisterDestroy()
219 
220 M*/
/* When built with dynamic libraries the creation-routine pointer (argument d)
   is deliberately dropped (passed as 0), matching the manual-page note above
   that the fourth argument is ignored -- the routine is presumably located by
   its name (argument c) in the library at path b when first needed. */
221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
223 #else
224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
225 #endif
226 
227 extern PetscBool  MatRegisterAllCalled;
228 extern PetscFList MatList;
229 extern PetscFList MatColoringList;
230 extern PetscFList MatPartitioningList;
231 
232 /*E
233     MatStructure - Indicates if the matrix has the same nonzero structure
234 
235     Level: beginner
236 
237    Any additions/changes here MUST also be made in include/finclude/petscmat.h
238 
239 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
240 E*/
/* Ordered from weakest to strongest assumption about how two matrices'
   nonzero patterns relate; SAME_PRECONDITIONER additionally indicates the
   preconditioner can be reused -- see KSPSetOperators()/PCSetOperators(). */
241 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
242 
243 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
253 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
267 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
268 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
269 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
270 
271 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
279 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
293 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
294 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
295 
296 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
297 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
305 
306 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
320 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
321 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
322 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
323 
324 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
325 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
328 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
329 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
330 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
331 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
332 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 
340 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
341 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
342 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
343 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
344 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
345 extern PetscErrorCode  MatCompositeMerge(Mat);
346 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/* How a MATCOMPOSITE applies its member matrices -- presumably summing their
   actions (additive) or chaining them (multiplicative); see
   MatCreateComposite(). */
347 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
348 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
349 
350 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
351 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
352 
353 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
354 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
355 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
356 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
357 
358 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
359 
360 extern PetscErrorCode  MatSetUp(Mat);
361 extern PetscErrorCode  MatDestroy(Mat*);
362 
363 extern PetscErrorCode  MatConjugate(Mat);
364 extern PetscErrorCode  MatRealPart(Mat);
365 extern PetscErrorCode  MatImaginaryPart(Mat);
366 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
367 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
368 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
369 
370 /* ------------------------------------------------------------*/
371 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
374 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
375 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
376 
377 /*S
378      MatStencil - Data structure (C struct) for storing information about a single row or
379         column of a matrix as indexed on an associated grid.
380 
381    Fortran usage is different, see MatSetValuesStencil() for details.
382 
383    Level: beginner
384 
385   Concepts: matrix; linear operator
386 
387 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
388 S*/
389 typedef struct {
  /* k,j,i: logical grid indices of the row/column; c: component (degree of
     freedom) number at that grid point -- see MatSetValuesStencil() for the
     exact interpretation and the different Fortran usage. */
390   PetscInt k,j,i,c;
391 } MatStencil;
392 
393 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
394 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
395 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
396 
397 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
398 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
399 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
400 
401 /*E
402     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
403      to continue to add values to it
404 
405     Level: beginner
406 
407 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
408 E*/
/* Note the explicit values: MAT_FINAL_ASSEMBLY is 0, MAT_FLUSH_ASSEMBLY is 1,
   so the ready-to-use state is the zero value.  FLUSH is for continuing to
   add values afterwards, FINAL before using the matrix (per the manual-page
   comment above). */
409 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
410 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
411 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
/* Queries whether assembly has completed -- presumably true after a FINAL
   MatAssemblyEnd(); confirm on the MatAssembled() manual page. */
412 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
413 
414 
415 
416 /*E
417     MatOption - Options that may be set for a matrix and its behavior or storage
418 
419     Level: beginner
420 
421    Any additions/changes here MUST also be made in include/finclude/petscmat.h
422 
423 .seealso: MatSetOption()
424 E*/
/* Boolean options toggled per-matrix via MatSetOption(); NUM_MAT_OPTIONS is
   the count sentinel and must remain last. */
425 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
426               MAT_SYMMETRIC,
427               MAT_STRUCTURALLY_SYMMETRIC,
428               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
429               MAT_NEW_NONZERO_LOCATION_ERR,
430               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
431               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
432               MAT_USE_INODES,
433               MAT_HERMITIAN,
434               MAT_SYMMETRY_ETERNAL,
435               MAT_CHECK_COMPRESSED_ROW,
436               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
437               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
438               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
439               NUM_MAT_OPTIONS} MatOption;
/* Printable names paralleling MatOption -- presumably indexed by the enum
   value; verify in the defining source file. */
440 extern const char *MatOptions[];
441 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
442 extern PetscErrorCode  MatGetType(Mat,const MatType*);
443 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
444 
445 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
446 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
447 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
449 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
450 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
451 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
452 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
453 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
454 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
455 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
456 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
457 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
458 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
459 
460 
461 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
462 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
463 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
464 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
465 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
466 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
467 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
468 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
469 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
470 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
471 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
472 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
473 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
474 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
475 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
476 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
477 
478 /*E
479     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
480   its numerical values copied over or just its nonzero structure.
481 
482     Level: beginner
483 
484    Any additions/changes here MUST also be made in include/finclude/petscmat.h
485 
486 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
487 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
488 $                               have several matrices with the same nonzero pattern.
489 
490 .seealso: MatDuplicate()
491 E*/
/* MAT_SHARE_NONZERO_PATTERN shares the i/j index arrays with the original
   matrix and also implies MAT_DO_NOT_COPY_VALUES (see the manual-page
   comment above). */
492 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
493 
494 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
495 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
496 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
497 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
498 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
499 
500 
501 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
502 extern PetscErrorCode  MatView(Mat,PetscViewer);
503 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
504 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
505 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
506 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
507 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
508 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
509 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
510 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
511 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
512 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
513 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
514 
515 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
516 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
517 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
518 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
519 
520 /*S
521      MatInfo - Context of matrix information, used with MatGetInfo()
522 
523    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
524 
525    Level: intermediate
526 
527   Concepts: matrix^nonzero information
528 
529 .seealso:  MatGetInfo(), MatInfoType
530 S*/
/* All fields are PetscLogDouble (not integers) so the struct maps onto a plain
   double precision array in Fortran, as noted in the S-comment above. */
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
540 
541 /*E
542     MatInfoType - Indicates if you want information about the local part of the matrix,
543      the entire parallel matrix or the maximum over all the local parts.
544 
545     Level: beginner
546 
547    Any additions/changes here MUST also be made in include/finclude/petscmat.h
548 
549 .seealso: MatGetInfo(), MatInfo
550 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
/* Row-wise extrema/sums; the optional PetscInt[] receives per-row column positions */
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode  MatZeroEntries(Mat);
/* Zero out selected rows (or rows and columns) of a matrix; see man pages for the
   role of the PetscScalar and the two optional Vec arguments */
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
583 
extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

/* Global/local dimensions and the row/column ownership decomposition across processes */
extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);

/* Submatrix extraction; Get/Restore and Get/Destroy calls must be paired */
extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Developer-level helpers for merging sequential pieces into MPI matrices
   (the underscore names follow the internal implementation-routine convention) */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMergeSymbolic(MPI_Comm,Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMergeNumeric(MPI_Comm,Mat,PetscInt,Mat);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
/* Signature depends on whether PETSc was configured with ctable support */
#if defined (PETSC_USE_CTABLE)
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
622 
/* Matrix-matrix products: combined routine plus separate symbolic/numeric phases;
   the PetscReal argument is an expected fill estimate */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*);
/* NOTE(review): "Transposet" in the next two names looks like a typo for "Transpose"
   (compare MatMatTransposeMult above); confirm against the implementation before
   renaming, since existing callers may depend on the misspelled symbols */
extern PetscErrorCode  MatMatTransposetMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatTransposetMultNumeric(Mat,Mat,Mat);
extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);

/* BLAS-style updates: AXPY is Y += a*X, AYPX is Y = a*Y + X;
   MatStructure describes how the two nonzero patterns relate */
extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local-to-global index mappings that enable the *Local variants below */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

/* Grid-transfer style operations; the Add variant accumulates into the output vector */
extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
666 
667 /*MC
668    MatSetValue - Set a single entry into a matrix.
669 
670    Not collective
671 
672    Input Parameters:
673 +  m - the matrix
674 .  row - the row location of the entry
675 .  col - the column location of the entry
676 .  value - the value to insert
677 -  mode - either INSERT_VALUES or ADD_VALUES
678 
679    Notes:
680    For efficiency one should use MatSetValues() and set several or many
681    values simultaneously if possible.
682 
683    Level: beginner
684 
685 .seealso: MatSetValues(), MatSetValueLocal()
686 M*/
687 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
688 
689 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
690 
691 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
692 
693 /*MC
694    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
695        row in a matrix providing the data that one can use to correctly preallocate the matrix.
696 
697    Synopsis:
698    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
699 
700    Collective on MPI_Comm
701 
702    Input Parameters:
703 +  comm - the communicator that will share the eventually allocated matrix
704 .  nrows - the number of LOCAL rows in the matrix
705 -  ncols - the number of LOCAL columns in the matrix
706 
707    Output Parameters:
708 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
710 
711 
712    Level: intermediate
713 
714    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
716 
717    Do not malloc or free dnz and onz, that is handled internally by these routines
718 
719    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
720 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
722 
723   Concepts: preallocation^Matrix
724 
725 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
726           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
727 M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  /* allocate and zero one diagonal (dnz) and one off-diagonal (onz) count per local row */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* prefix sums over the communicator yield this process's first global column (__start) and first global row (__rstart) */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
736 
737 /*MC
738    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
739        row in a matrix providing the data that one can use to correctly preallocate the matrix.
740 
741    Synopsis:
742    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
743 
744    Collective on MPI_Comm
745 
746    Input Parameters:
747 +  comm - the communicator that will share the eventually allocated matrix
748 .  nrows - the number of LOCAL rows in the matrix
749 -  ncols - the number of LOCAL columns in the matrix
750 
751    Output Parameters:
752 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
754 
755 
756    Level: intermediate
757 
758    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
760 
761    Do not malloc or free dnz and onz, that is handled internally by these routines
762 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
764 
765   Concepts: preallocation^Matrix
766 
767 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
768           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
769 M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  /* allocate and zero one diagonal (dnz) and one off-diagonal (onz) count per local row */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  /* unlike MatPreallocateInitialize() no __start is computed: the symmetric Set macro only compares columns against __end and the row index */ \
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
778 
779 /*MC
780    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
781        inserted using a local number of the rows and columns
782 
783    Synopsis:
784    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
785 
786    Not Collective
787 
788    Input Parameters:
789 +  map - the row mapping from local numbering to global numbering
790 .  nrows - the number of rows indicated
791 .  rows - the indices of the rows
792 .  cmap - the column mapping from local to global numbering
793 .  ncols - the number of columns in the matrix
794 .  cols - the columns indicated
795 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
797 
798 
799    Level: intermediate
800 
801    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
803 
804    Do not malloc or free dnz and onz, that is handled internally by these routines
805 
806   Concepts: preallocation^Matrix
807 
808 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
809           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
810 M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  /* NOTE(review): translates the rows[] and cols[] arrays to global numbering IN PLACE (output array == input array) */ \
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* then tally each row with the global-numbering macro; relies on _4_ierr and friends from MatPreallocateInitialize() */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
820 
821 /*MC
822    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
823        inserted using a local number of the rows and columns
824 
825    Synopsis:
826    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
827 
828    Not Collective
829 
830    Input Parameters:
831 +  map - the mapping between local numbering and global numbering
832 .  nrows - the number of rows indicated
833 .  rows - the indices of the rows
834 .  ncols - the number of columns in the matrix
835 .  cols - the columns indicated
836 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
838 
839 
840    Level: intermediate
841 
842    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
844 
845    Do not malloc or free dnz and onz that is handled internally by these routines
846 
847   Concepts: preallocation^Matrix
848 
849 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
850           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
851 M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  /* NOTE(review): translates rows[] and cols[] to global numbering IN PLACE using the single mapping */ \
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  /* tally with the symmetric (upper-triangular) counting macro */ \
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
861 
862 /*MC
863    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
864        inserted using a local number of the rows and columns
865 
866    Synopsis:
867    PetscErrorCode MatPreallocateSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
868 
869    Not Collective
870 
871    Input Parameters:
872 +  row - the row
873 .  ncols - the number of columns in the matrix
874 -  cols - the columns indicated
875 
876    Output Parameters:
877 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
879 
880 
881    Level: intermediate
882 
883    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
885 
886    Do not malloc or free dnz and onz that is handled internally by these routines
887 
888    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
889 
890   Concepts: preallocation^Matrix
891 
892 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
893           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
894 M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  /* row is a GLOBAL index and must lie in this process's range [__rstart, __rstart+__nrows) set up by MatPreallocateInitialize() */ \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  /* columns inside the local column range [__start,__end) count toward the diagonal block, all others toward the off-diagonal block */ \
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
904 
905 /*MC
906    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
907        inserted using a local number of the rows and columns
908 
909    Synopsis:
910    PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
911 
912    Not Collective
913 
914    Input Parameters:
915 +  nrows - the number of rows indicated
916 .  rows - the indices of the rows
917 .  ncols - the number of columns in the matrix
918 .  cols - the columns indicated
919 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
921 
922 
923    Level: intermediate
924 
925    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
927 
928    Do not malloc or free dnz and onz that is handled internally by these routines
929 
930    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
931 
932   Concepts: preallocation^Matrix
933 
934 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
935           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
936 M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  /* only columns at or right of the diagonal (cols >= row) are counted; those past the local column range end (__end) go to onz */ \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
944 
945 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
947 
948    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
950 
951    Not Collective
952 
953    Input Parameters:
954 .  A - matrix
955 .  row - row where values exist (must be local to this process)
956 .  ncols - number of columns
957 .  cols - columns with nonzeros
958 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
960 
961 
962    Level: intermediate
963 
964    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
966 
967    Do not malloc or free dnz and onz that is handled internally by these routines
968 
969    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
970 
971   Concepts: preallocation^Matrix
972 
973 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
974           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
975 M*/
/* If the matrix A already exists, stuff the locations straight into it (PETSC_NULL values);
   otherwise tally them with MatPreallocateSet().
   NOTE(review): this macro uses the caller's ierr, not the _4_ierr declared by
   MatPreallocateInitialize() -- the caller must have a PetscErrorCode ierr in scope. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
977 
978 
979 /*MC
980    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
981        row in a matrix providing the data that one can use to correctly preallocate the matrix.
982 
983    Synopsis:
984    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
985 
986    Collective on MPI_Comm
987 
988    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
991 
992 
993    Level: intermediate
994 
995    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
997 
998    Do not malloc or free dnz and onz that is handled internally by these routines
999 
1000    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
1001 
1002   Concepts: preallocation^Matrix
1003 
1004 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1005           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1006 M*/
/* Frees the dnz/onz arrays and closes the { opened by MatPreallocateInitialize() */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1008 
1009 
1010 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void *);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Construct sequential matrices directly from user-supplied index/value arrays */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool);

/* sentinel value; NOTE(review): the name suggests preallocation routines treat it as
   "do not allocate" -- confirm in the implementations before relying on it */
#define MAT_SKIP_ALLOCATION -4

/* Per-format preallocation; the polymorphic forms default omitted size arguments to 0 */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

/* Save a snapshot of the matrix's numerical values and restore it later */
extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1058 /*
1059   These routines are not usually accessed directly, rather solving is
1060   done through the KSP and PC interfaces.
1061 */
1062 
1063 /*J
1064     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1065        with an optional dynamic library name, for example
1066        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1067 
1068    Level: beginner
1069 
1070    Cannot use const because the PC objects manipulate the string
1071 
1072 .seealso: MatGetOrdering()
1073 J*/
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Compute row/column permutations of the named type; new orderings are added via MatOrderingRegister() */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1086 
1087 /*MC
1088    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1089 
1090    Synopsis:
1091    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1092 
1093    Not Collective
1094 
1095    Input Parameters:
1096 +  sname - name of ordering (for example MATORDERINGND)
1097 .  path - location of library where creation routine is
1098 .  name - name of function that creates the ordering type,a string
1099 -  function - function pointer that creates the ordering
1100 
1101    Level: developer
1102 
1103    If dynamic libraries are used, then the fourth input argument (function)
1104    is ignored.
1105 
1106    Sample usage:
1107 .vb
   MatOrderingRegisterDynamic("my_order","/home/username/my_lib/lib/libO/solaris/mylib.a",
               "MyOrder",MyOrder);
1110 .ve
1111 
1112    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1114    or at runtime via the option
1115 $     -pc_factor_mat_ordering_type my_order
1116 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1118 
1119 .keywords: matrix, ordering, register
1120 
1121 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1122 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name (c) in the
   library (b) at runtime, so the function-pointer argument is dropped (passed as 0). */
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1128 
extern PetscErrorCode  MatOrderingRegisterDestroy(void);    /* clear the list of registered orderings */
extern PetscErrorCode  MatOrderingRegisterAll(const char[]);/* register all built-in orderings */
extern PetscBool  MatOrderingRegisterAllCalled;             /* set once MatOrderingRegisterAll() has run */
extern PetscFList MatOrderingList;                          /* function list of registered orderings */

/* Permute a matrix so the diagonal has no entries smaller than the given tolerance */
extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1135 
1136 /*S
1137     MatFactorShiftType - Numeric Shift.
1138 
1139    Level: beginner
1140 
1141 S*/
/* Strategy for shifting values during factorization to avoid zero pivots */
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];   /* printable names matching MatFactorShiftType */
1144 
1145 /*S
1146    MatFactorInfo - Data passed into the matrix factorization routines
1147 
1148    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1149 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1150 
1151    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1152 
1153       You can use MatFactorInfoInitialize() to set default values.
1154 
1155    Level: developer
1156 
1157 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1158           MatFactorInfoInitialize()
1159 
1160 S*/
typedef struct {
  /* All fields are PetscReal so the struct maps onto a plain double precision
     array in Fortran (see the manual page above). */
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* nonzero means use a drop-tolerance (dt) based factorization -- NOTE(review): confirm semantics */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value) */
  PetscReal     shiftamount;    /* how large the shift is */
} MatFactorInfo;
1175 
extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);  /* fill a MatFactorInfo with default values */

/* Cholesky/ICC (symmetric) factorization: in-place, symbolic, and numeric phases */
extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
/* LU/ILU factorization: in-place, symbolic, and numeric phases */
extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
/* Inertia (eigenvalue sign counts) of a factored matrix -- see manual page for argument order */
extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
/* Triangular solves using a previously factored matrix */
extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);   /* solve with multiple right-hand sides */

extern PetscErrorCode  MatSetUnfactored(Mat);      /* clear the factored flag so the matrix is treated as unfactored again */
1197 
1198 /*E
1199     MatSORType - What type of (S)SOR to perform
1200 
1201     Level: beginner
1202 
1203    May be bitwise ORd together
1204 
1205    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1206 
1207    MatSORType may be bitwise ORd together, so do not change the numbers
1208 
1209 .seealso: MatSOR()
1210 E*/
/* Values are single bits except the SYMMETRIC entries, which are the OR of the
   corresponding FORWARD|BACKWARD bits (3 = 1|2, 12 = 4|8); do not renumber. */
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1216 
1217 /*
1218     These routines are for efficiently computing Jacobians via finite differences.
1219 */
1220 
1221 /*J
1222     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1223        with an optional dynamic library name, for example
1224        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1225 
1226    Level: beginner
1227 
1228 .seealso: MatGetColoring()
1229 J*/
/* Names of the built-in matrix colorings (used with MatGetColoring()) */
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural"   /* natural (one color per column) -- TODO confirm */
#define MATCOLORINGSL      "sl"        /* smallest-last */
#define MATCOLORINGLF      "lf"        /* largest-first */
#define MATCOLORINGID      "id"        /* incidence-degree */

/* Compute a coloring of the matrix columns with the named algorithm */
extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
/* Register a coloring: (name, library path, creation-routine name, creation routine) */
extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1238 
1239 /*MC
1240    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1241                                matrix package.
1242 
1243    Synopsis:
1244    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1245 
1246    Not Collective
1247 
1248    Input Parameters:
1249 +  sname - name of Coloring (for example MATCOLORINGSL)
1250 .  path - location of library where creation routine is
1251 .  name - name of function that creates the Coloring type, a string
1252 -  function - function pointer that creates the coloring
1253 
1254    Level: developer
1255 
1256    If dynamic libraries are used, then the fourth input argument (function)
1257    is ignored.
1258 
1259    Sample usage:
1260 .vb
1261    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1262                "MyColor",MyColor);
1263 .ve
1264 
   Then, your coloring routine can be chosen with the procedural interface via
1266 $     MatColoringSetType(part,"my_color")
1267    or at runtime via the option
1268 $     -mat_coloring_type my_color
1269 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1271 
1272 .keywords: matrix, Coloring, register
1273 
1274 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1275 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name at runtime,
   so the function-pointer argument is dropped (passed as 0). */
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif

extern PetscBool  MatColoringRegisterAllCalled;   /* set once MatColoringRegisterAll() has run */

extern PetscErrorCode  MatColoringRegisterAll(const char[]);   /* register all built-in colorings */
extern PetscErrorCode  MatColoringRegisterDestroy(void);       /* clear the list of registered colorings */
extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1287 
1288 /*S
1289      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1290         and coloring
1291 
1292    Level: beginner
1293 
1294   Concepts: coloring, sparse Jacobian, finite differences
1295 
1296 .seealso:  MatFDColoringCreate()
1297 S*/
typedef struct _p_MatFDColoring* MatFDColoring;

extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
/* The function whose Jacobian is approximated is stored as a generic
   function pointer plus a user context; it is cast back at call time. */
extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);   /* supply a precomputed base function value */
extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1311 
1312 /*S
1313      MatMultTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring
1314 
1315    Level: beginner
1316 
1317   Concepts: coloring, sparse matrix product
1318 
1319 .seealso:  MatMultTransposeColoringCreate()
1320 S*/
typedef struct _p_MatMultTransposeColoring* MatMultTransposeColoring;

/* Create/apply/destroy a coloring-based C = A*B^T product context */
extern PetscErrorCode  MatMultTransposeColoringCreate(Mat,ISColoring,MatMultTransposeColoring *);
extern PetscErrorCode  MatMultTransposeColoringApply(Mat,Mat,MatMultTransposeColoring);
extern PetscErrorCode  MatMultTransposeColoringDestroy(MatMultTransposeColoring*);
1326 
1327 /*
1328     These routines are for partitioning matrices: currently used only
1329   for adjacency matrix, MatCreateMPIAdj().
1330 */
1331 
1332 /*S
1333      MatPartitioning - Object for managing the partitioning of a matrix or graph
1334 
1335    Level: beginner
1336 
1337   Concepts: partitioning
1338 
1339 .seealso:  MatPartitioningCreate(), MatPartitioningType
1340 S*/
1341 typedef struct _p_MatPartitioning* MatPartitioning;
1342 
1343 /*J
1344     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1345        with an optional dynamic library name, for example
1346        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1347 
1348    Level: beginner
1349 
1350 .seealso: MatPartitioningCreate(), MatPartitioning
1351 J*/
/* Names of the matrix-partitioning packages/algorithms */
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"     /* keep the current distribution */
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"    /* requires the ParMETIS package */
#define MATPARTITIONINGCHACO    "chaco"       /* requires the Chaco package */
#define MATPARTITIONINGPARTY    "party"       /* requires the Party package */
#define MATPARTITIONINGPTSCOTCH "ptscotch"    /* requires the PT-Scotch package */


extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);      /* number of parts to create */
extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);        /* adjacency (graph) matrix to partition */
extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);               /* compute the partitioning */
extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);

/* Register a partitioner: (name, library path, creation-routine name, creation routine) */
extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1371 
1372 /*MC
1373    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1374    matrix package.
1375 
1376    Synopsis:
1377    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1378 
1379    Not Collective
1380 
1381    Input Parameters:
1382 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1383 .  path - location of library where creation routine is
1384 .  name - name of function that creates the partitioning type, a string
1385 -  function - function pointer that creates the partitioning type
1386 
1387    Level: developer
1388 
1389    If dynamic libraries are used, then the fourth input argument (function)
1390    is ignored.
1391 
1392    Sample usage:
1393 .vb
1394    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1395                "MyPartCreate",MyPartCreate);
1396 .ve
1397 
1398    Then, your partitioner can be chosen with the procedural interface via
1399 $     MatPartitioningSetType(part,"my_part")
1400    or at runtime via the option
1401 $     -mat_partitioning_type my_part
1402 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1404 
1405 .keywords: matrix, partitioning, register
1406 
1407 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1408 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name at runtime,
   so the function-pointer argument is dropped (passed as 0). */
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif

extern PetscBool  MatPartitioningRegisterAllCalled;   /* set once MatPartitioningRegisterAll() has run */

extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
extern PetscErrorCode  MatPartitioningRegisterDestroy(void);

extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);

/* ParMETIS-specific options */
extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1426 
/* Chaco-specific option enums; explicit values presumably mirror Chaco's own
   numbering (note the gap at 3 in the global methods) -- confirm against Chaco docs */
typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
extern const char *MPChacoGlobalTypes[];
typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
extern const char *MPChacoLocalTypes[];
typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
extern const char *MPChacoEigenTypes[];

/* Chaco-specific partitioner options */
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1445 
/* Party-specific global method names */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
/* Party-specific local method names */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1462 
/* PT-Scotch strategy selection */
typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
extern const char *MPPTScotchStrategyTypes[];

extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);

/* Build vertex/cell adjacency graphs from a mesh matrix */
extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1473 
1474 /*
1475     If you add entries here you must also add them to finclude/petscmat.h
1476 */
/* Indices into a Mat's function table, used with MatHasOperation() and
   MatShellSet/GetOperation() below. Values must stay stable; note the
   gaps (124, 126, 127) where entries are currently unassigned. */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               MATOP_GETCOLUMNNORMS=125,          /* 124 is unassigned */
               MATOP_GET_SUBMATRICES_PARALLEL=128, /* 126-127 are unassigned */
               MATOP_SET_VALUES_BATCH=129
             } MatOperation;
extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);         /* query whether an operation is implemented */
extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));  /* install an operation on a MATSHELL */
extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
extern PetscErrorCode  MatShellSetContext(Mat,void*);                         /* user context retrieved inside shell operations */
1609 
1610 /*
1611    Codes for matrices stored on disk. By default they are
1612    stored in a universal format. By changing the format with
1613    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1614    be stored in a way natural for the matrix, for example dense matrices
1615    would be stored as dense. Matrices stored this way may only be
1616    read into matrices of the same type.
1617 */
#define MATRIX_BINARY_FORMAT_DENSE -1   /* on-disk format code: matrix stored as dense */

extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);   /* access the local matrix of a MATIS */
1622 
1623 /*S
1624      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1626 
1627    Level: advanced
1628 
1629   Concepts: matrix; linear operator, null space
1630 
1631   Users manual sections:
1632 .   sec_singular
1633 
1634 .seealso:  MatNullSpaceCreate()
1635 S*/
typedef struct _p_MatNullSpace* MatNullSpace;

extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
/* Optional user routine that removes the null space, instead of supplying basis vectors */
extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);     /* project the null space out of a vector */
extern PetscErrorCode  MatSetNullSpace(Mat,MatNullSpace);
extern PetscErrorCode  MatSetNearNullSpace(Mat,MatNullSpace);
extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);  /* check that the claimed null space really is one */
extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1646 
/* SBAIJ/BAIJ-specific helpers */
extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);

/* MAIJ matrices (see MATMAIJ in the type list above) */
extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);

extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);   /* build the dense/explicit form of an operator */

extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);

/* Matrix-free finite-difference (MFFD) operators: y ~ J(u)*a computed by differencing the user function */
extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);   /* record/replay the history of computed h values */
extern PetscErrorCode  MatMFFDResetHHistory(Mat);
extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);                   /* current differencing parameter h */
extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1674 
1675 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1677               Jacobian vector products
1678 
1679     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1680 
1681            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1682 
1683     Level: developer
1684 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1686 S*/
1687 typedef struct _p_MatMFFD* MatMFFD;
1688 
1689 /*J
1690     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1691 
1692    Level: beginner
1693 
1694 .seealso: MatMFFDSetType(), MatMFFDRegister()
1695 J*/
/* Algorithms for choosing the differencing parameter h */
#define MatMFFDType char*
#define MATMFFD_DS  "ds"   /* Dennis-Schnabel style h selection -- TODO confirm */
#define MATMFFD_WP  "wp"   /* Walker-Pernice style h selection -- TODO confirm */

extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
/* Register an h-computation method: (name, library path, creation-routine name, creation routine) */
extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1702 
1703 /*MC
1704    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1705 
1706    Synopsis:
1707    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1708 
1709    Not Collective
1710 
1711    Input Parameters:
1712 +  name_solver - name of a new user-defined compute-h module
1713 .  path - path (either absolute or relative) the library containing this solver
1714 .  name_create - name of routine to create method context
1715 -  routine_create - routine to create method context
1716 
1717    Level: developer
1718 
1719    Notes:
1720    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1721 
1722    If dynamic libraries are used, then the fourth input argument (routine_create)
1723    is ignored.
1724 
1725    Sample usage:
1726 .vb
1727    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1728                "MyHCreate",MyHCreate);
1729 .ve
1730 
1731    Then, your solver can be chosen with the procedural interface via
1732 $     MatMFFDSetType(mfctx,"my_h")
1733    or at runtime via the option
1734 $     -snes_mf_type my_h
1735 
1736 .keywords: MatMFFD, register
1737 
1738 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1739 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name at runtime,
   so the function-pointer argument is dropped (passed as 0). */
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
extern PetscErrorCode  MatMFFDRegisterDestroy(void);
extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);           /* option specific to the "ds" type */
extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );  /* option specific to the "wp" type */


/* Export matrices to a Mathematica viewer */
extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1754 
/*
   PETSc interface to MUMPS (only available when PETSc is built with MUMPS)
*/
#ifdef PETSC_HAVE_MUMPS
extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);   /* set a MUMPS ICNTL control parameter */
#endif

/*
   PETSc interface to SUPERLU (only available when PETSc is built with SuperLU)
*/
#ifdef PETSC_HAVE_SUPERLU
extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
#endif

/* GPU (CUSP) matrix constructors (only available when PETSc is built with CUSP) */
#if defined(PETSC_HAVE_CUSP)
extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
#endif

/*
   PETSc interface to FFTW (only available when PETSc is built with FFTW)
*/
#if defined(PETSC_HAVE_FFTW)
extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
#endif

/* MATNEST: a matrix built from a 2D array of submatrices */
extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1790 
1791 /*
1792  MatIJ:
1793  An unweighted directed pseudograph
1794  An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
1795  A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
 by mapping the indices to the indices connected to them by the (pseudo)graph edges.
1797  */
/* Whether MatIJ indices are interpreted in the local or the global numbering */
typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType;
extern  PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);      /* allow multiple edges from the same index */
extern  PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
/* Define/query the (pseudo)graph edges as parallel arrays or index sets */
extern  PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
extern  PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
extern  PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
extern  PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
extern  PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
extern  PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
extern  PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
/* Support = indices with outgoing edges; image = indices with incoming edges */
extern  PetscErrorCode MatIJGetSupport(Mat,  PetscInt *, PetscInt **);
extern  PetscErrorCode MatIJGetSupportIS(Mat, IS *);
extern  PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
extern  PetscErrorCode MatIJGetImageIS(Mat, IS *);
extern  PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
extern  PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);

extern  PetscErrorCode MatIJBinRenumber(Mat, Mat*);

/* Map/bin sparse arrays of indices (optionally with integer/scalar payloads) through the graph */
extern  PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern  PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern  PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
1820 
1821 PETSC_EXTERN_CXX_END
1822 #endif
1823