xref: /petsc/include/petscmat.h (revision 1c1325fed0af5dba760aaa2c4ef182ea17894bcc)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*J
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 J*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 #define MATIJ              "ij"
85 
86 /*J
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 J*/
97 #define MatSolverPackage char*
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERMATLAB       "matlab"
108 #define MATSOLVERPETSC        "petsc"
109 #define MATSOLVERPLAPACK      "plapack"
110 #define MATSOLVERBAS          "bas"
111 
112 #define MATSOLVERBSTRM        "bstrm"
113 #define MATSOLVERSBSTRM       "sbstrm"
114 
115 /*E
116     MatFactorType - indicates what type of factorization is requested
117 
118     Level: beginner
119 
120    Any additions/changes here MUST also be made in include/finclude/petscmat.h
121 
122 .seealso: MatSolverPackage, MatGetFactor()
123 E*/
124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
125 extern const char *const MatFactorTypes[];
126 
127 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
128 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
129 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
130 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
131 
132 /* Logging support */
133 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
134 extern PetscClassId  MAT_CLASSID;
135 extern PetscClassId  MAT_FDCOLORING_CLASSID;
136 extern PetscClassId  MAT_PARTITIONING_CLASSID;
137 extern PetscClassId  MAT_NULLSPACE_CLASSID;
138 extern PetscClassId  MATMFFD_CLASSID;
139 
140 /*E
141     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
142      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
143      that the input matrix is to be replaced with the converted matrix.
144 
145     Level: beginner
146 
147    Any additions/changes here MUST also be made in include/finclude/petscmat.h
148 
149 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
150 E*/
151 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
152 
153 /*E
154     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
155      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
156 
157     Level: beginner
158 
159 .seealso: MatGetSeqNonzeroStructure()
160 E*/
161 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
162 
163 extern PetscErrorCode  MatInitializePackage(const char[]);
164 
165 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
166 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
167 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
168 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
169 extern PetscErrorCode  MatSetType(Mat,const MatType);
170 extern PetscErrorCode  MatSetFromOptions(Mat);
171 extern PetscErrorCode  MatSetUpPreallocation(Mat);
172 extern PetscErrorCode  MatRegisterAll(const char[]);
173 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
174 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
175 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
176 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
178 
179 /*MC
180    MatRegisterDynamic - Adds a new matrix type
181 
182    Synopsis:
183    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
184 
185    Not Collective
186 
187    Input Parameters:
188 +  name - name of a new user-defined matrix type
189 .  path - path (either absolute or relative) to the library containing this solver
190 .  name_create - name of routine to create method context
191 -  routine_create - routine to create method context
192 
193    Notes:
194    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
195 
196    If dynamic libraries are used, then the fourth input argument (routine_create)
197    is ignored.
198 
199    Sample usage:
200 .vb
201    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
202                "MyMatCreate",MyMatCreate);
203 .ve
204 
205    Then, your solver can be chosen with the procedural interface via
206 $     MatSetType(Mat,"my_mat")
207    or at runtime via the option
208 $     -mat_type my_mat
209 
210    Level: advanced
211 
212    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
213          If your function is not being put into a shared library then use MatRegister() instead
214 
215 .keywords: Mat, register
216 
217 .seealso: MatRegisterAll(), MatRegisterDestroy()
218 
219 M*/
220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
222 #else
223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
224 #endif
225 
226 extern PetscBool  MatRegisterAllCalled;
227 extern PetscFList MatList;
228 extern PetscFList MatColoringList;
229 extern PetscFList MatPartitioningList;
230 
231 /*E
232     MatStructure - Indicates if the matrix has the same nonzero structure
233 
234     Level: beginner
235 
236    Any additions/changes here MUST also be made in include/finclude/petscmat.h
237 
238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
239 E*/
240 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
241 
242 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
243 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
252 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
260 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
267 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
268 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
269 
270 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
278 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
293 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
294 
295 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
296 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
304 
305 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
320 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
321 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
322 
323 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
324 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
325 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
326 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
327 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
328 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
329 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
330 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
331 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
332 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
333 
334 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
335 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
338 
339 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
340 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
341 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
342 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
343 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
344 extern PetscErrorCode  MatCompositeMerge(Mat);
345 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/*E
    MatCompositeType - Indicates if the matrices stored in a MATCOMPOSITE matrix are combined
     as a sum (additive) or as a product (multiplicative) when the composite matrix is applied

    Level: beginner

.seealso: MatCreateComposite(), MatCompositeSetType(), MatCompositeAddMat()
E*/
346 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
347 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
348 
349 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
350 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
351 
352 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
353 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
354 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
355 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
356 
357 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
358 
359 extern PetscErrorCode  MatSetUp(Mat);
360 extern PetscErrorCode  MatDestroy(Mat*);
361 
362 extern PetscErrorCode  MatConjugate(Mat);
363 extern PetscErrorCode  MatRealPart(Mat);
364 extern PetscErrorCode  MatImaginaryPart(Mat);
365 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
366 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
367 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
368 
369 /* ------------------------------------------------------------*/
370 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
371 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
373 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
374 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
375 
376 /*S
377      MatStencil - Data structure (C struct) for storing information about a single row or
378         column of a matrix as an index on an associated grid.
379 
380    Level: beginner
381 
382   Concepts: matrix; linear operator
383 
384 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
385 S*/
386 typedef struct {
387   PetscInt k,j,i,c;  /* k,j,i: logical grid indices of the point (presumably i is the fastest-varying dimension -- confirm with MatSetValuesStencil() docs); c: component (degree of freedom) at that grid point */
388 } MatStencil;
389 
390 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
391 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
392 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
393 
394 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
395 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
396 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
397 
398 /*E
399     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
400      to continue to add values to it
401 
402     Level: beginner
403 
404 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
405 E*/
406 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* NOTE(review): explicit values (FINAL=0, FLUSH=1) look intentional, likely for the Fortran interface -- confirm against include/finclude/petscmat.h before changing */
407 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
408 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
409 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
410 
411 
412 
413 /*E
414     MatOption - Options that may be set for a matrix and its behavior or storage
415 
416     Level: beginner
417 
418    Any additions/changes here MUST also be made in include/finclude/petscmat.h
419 
420 .seealso: MatSetOption()
421 E*/
422 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
423               MAT_SYMMETRIC,
424               MAT_STRUCTURALLY_SYMMETRIC,
425               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
426               MAT_NEW_NONZERO_LOCATION_ERR,
427               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
428               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
429               MAT_USE_INODES,
430               MAT_HERMITIAN,
431               MAT_SYMMETRY_ETERNAL,
432               MAT_CHECK_COMPRESSED_ROW,
433               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
434               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
435               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
436               NUM_MAT_OPTIONS} MatOption;
437 extern const char *MatOptions[];
438 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
439 extern PetscErrorCode  MatGetType(Mat,const MatType*);
440 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
441 
442 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
443 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
444 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
445 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
446 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
447 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
449 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
450 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
451 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
452 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
453 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
454 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
455 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
456 
457 
458 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
459 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
460 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
461 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
462 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
463 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
464 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
465 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
466 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
467 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
468 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
469 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
470 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
471 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
472 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
473 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
474 
475 /*E
476     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
477   its numerical values copied over or just its nonzero structure.
478 
479     Level: beginner
480 
481    Any additions/changes here MUST also be made in include/finclude/petscmat.h
482 
483 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
484 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
485 $                               have several matrices with the same nonzero pattern.
486 
487 .seealso: MatDuplicate()
488 E*/
489 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
490 
491 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
492 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
493 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
494 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
495 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
496 
497 
498 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
499 extern PetscErrorCode  MatView(Mat,PetscViewer);
500 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
501 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
502 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
503 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
504 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
505 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
506 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
507 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
508 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
509 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
510 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
511 
512 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
513 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
514 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
515 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
516 
/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, and allocated but unneeded */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: the ratio requested by the user vs. the ratio actually required */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
537 
/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_LOCAL      - values for this process's local portion of the matrix only
$   MAT_GLOBAL_MAX - for each value, the maximum over all processes sharing the matrix
$   MAT_GLOBAL_SUM - for each value, the sum over all processes sharing the matrix

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
549 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
550 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
551 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
552 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
553 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
554 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
555 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
556 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
557 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
558 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
559 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
560 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
561 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
562 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
563 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
564 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
565 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
566 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
567 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
568 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
569 
570 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
571 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
572 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
573 extern PetscErrorCode  MatZeroEntries(Mat);
574 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
575 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
576 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
577 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
578 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
579 
580 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
581 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
582 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
583 
584 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
585 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
586 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
587 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
588 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
589 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
590 
591 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
592 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
593 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
594 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
595 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
596 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
597 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
598 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
599 
600 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
601 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
602 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
603 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
604 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
605 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
606 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
607 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
608 #if defined (PETSC_USE_CTABLE)
609 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
610 #else
611 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
612 #endif
613 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
614 
615 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
616 
617 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
618 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
619 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
620 
621 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
622 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
623 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
624 
625 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
626 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
627 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
628 
629 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
630 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
631 
632 extern PetscErrorCode  MatScale(Mat,PetscScalar);
633 extern PetscErrorCode  MatShift(Mat,PetscScalar);
634 
635 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
636 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
637 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
638 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
639 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
640 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
641 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
642 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
643 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
644 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
645 
646 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
647 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
648 
649 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
650 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
651 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
652 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
653 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
654 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
655 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
656 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
657 
658 /*MC
659    MatSetValue - Set a single entry into a matrix.
660 
661    Not collective
662 
663    Input Parameters:
664 +  m - the matrix
665 .  row - the row location of the entry
666 .  col - the column location of the entry
667 .  value - the value to insert
668 -  mode - either INSERT_VALUES or ADD_VALUES
669 
670    Notes:
671    For efficiency one should use MatSetValues() and set several or many
672    values simultaneously if possible.
673 
674    Level: beginner
675 
676 .seealso: MatSetValues(), MatSetValueLocal()
677 M*/
678 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
679 
680 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
681 
682 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
683 
/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
727 
/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
769 
/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows (overwritten in place with their global equivalents)
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns indicated
.  cols - the columns indicated (overwritten in place with their global equivalents)
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
811 
/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows (overwritten in place with their global equivalents)
.  ncols - the number of columns indicated
.  cols - the columns indicated (overwritten in place with their global equivalents)
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
852 
/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted; the row and columns are given in global numbering (the row must lie in this process's
       ownership range established by MatPreallocateInitialize())

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
895 
/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted; the row and columns are given in global numbering (only entries on or above the
       diagonal, cols[i] >= row, or in the off-process part are counted)

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
935 
/*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
968 
969 
/*MC
   MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
999 
1000 
1001 
1002 /* Routines unique to particular data structures */
1003 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1004 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1005 
1006 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1007 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1008 
1009 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1010 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1011 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1012 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1013 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1014 
1015 #define MAT_SKIP_ALLOCATION -4
1016 
1017 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1018 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1019 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1020 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1021 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1022 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1023 
1024 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1025 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1026 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1027 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1028 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1029 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1030 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1031 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1032 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1033 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1034 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1035 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1036 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1037 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1038 
1039 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1040 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1041 
1042 extern PetscErrorCode  MatStoreValues(Mat);
1043 extern PetscErrorCode  MatRetrieveValues(Mat);
1044 
1045 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1046 
1047 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1048 /*
1049   These routines are not usually accessed directly, rather solving is
1050   done through the KSP and PC interfaces.
1051 */
1052 
1053 /*J
1054     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1055        with an optional dynamic library name, for example
1056        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1057 
1058    Level: beginner
1059 
1060    Cannot use const because the PC objects manipulate the string
1061 
1062 .seealso: MatGetOrdering()
1063 J*/
1064 #define MatOrderingType char*
1065 #define MATORDERINGNATURAL     "natural"
1066 #define MATORDERINGND          "nd"
1067 #define MATORDERING1WD         "1wd"
1068 #define MATORDERINGRCM         "rcm"
1069 #define MATORDERINGQMD         "qmd"
1070 #define MATORDERINGROWLENGTH   "rowlength"
1071 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1072 
1073 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1074 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1075 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1076 
/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,const MatOrderingType,IS*,IS*))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type,a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order","/home/username/my_lib/lib/libO/solaris/mylib.a",
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1118 
1119 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1120 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1121 extern PetscBool  MatOrderingRegisterAllCalled;
1122 extern PetscFList MatOrderingList;
1123 
1124 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1125 
1126 /*S
1127     MatFactorShiftType - Numeric Shift.
1128 
1129    Level: beginner
1130 
1131 S*/
/* Strategy for shifting the factorization to avoid zero pivots (see the
   shifttype/shiftamount fields of MatFactorInfo below).
   MatFactorShiftTypes[] holds the corresponding human-readable names. */
1132 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1133 extern const char *MatFactorShiftTypes[];
1134 
1135 /*S
1136    MatFactorInfo - Data passed into the matrix factorization routines
1137 
1138    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1139 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1140 
1141    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1142 
1143       You can use MatFactorInfoInitialize() to set default values.
1144 
1145    Level: developer
1146 
1147 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1148           MatFactorInfoInitialize()
1149 
1150 S*/
/* All fields are PetscReal, even flag-like ones, so that from Fortran the
   structure can be treated as a plain double precision array of size
   MAT_FACTORINFO_SIZE (see the MatFactorInfo manual page above). */
1151 typedef struct {
1152   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1153   PetscReal     usedt;          /* nonzero if drop-tolerance (dt) based factorization is used -- TODO(review) confirm against factorization implementations */
1154   PetscReal     dt;             /* drop tolerance */
1155   PetscReal     dtcol;          /* tolerance for pivoting */
1156   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1157   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1158   PetscReal     levels;         /* ICC/ILU(levels) */
1159   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1160                                    factorization may be faster if do not pivot */
1161   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1162   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots; a MatFactorShiftType value stored as a real */
1163   PetscReal     shiftamount;     /* how large the shift is */
1164 } MatFactorInfo;
1165 
1166 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1167 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1168 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1169 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1170 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1171 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1172 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1173 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1174 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1175 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1176 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1177 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1178 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1179 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1180 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1181 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1182 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1183 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1184 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1185 
1186 extern PetscErrorCode  MatSetUnfactored(Mat);
1187 
1188 /*E
1189     MatSORType - What type of (S)SOR to perform
1190 
1191     Level: beginner
1192 
1193    May be bitwise ORd together
1194 
1195    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1196 
1197    MatSORType may be bitwise ORd together, so do not change the numbers
1198 
1199 .seealso: MatSOR()
1200 E*/
/* Values are fixed bit flags so they may be bitwise OR'd together (do NOT
   renumber -- see the MatSORType manual page above).  The *_SYMMETRIC_SWEEP
   values are composites: 3 = FORWARD|BACKWARD, 12 = LOCAL_FORWARD|LOCAL_BACKWARD. */
1201 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1202               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1203               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1204               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1205 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1206 
1207 /*
1208     These routines are for efficiently computing Jacobians via finite differences.
1209 */
1210 
1211 /*J
1212     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1213        with an optional dynamic library name, for example
1214        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1215 
1216    Level: beginner
1217 
1218 .seealso: MatGetColoring()
1219 J*/
1220 #define MatColoringType char*
1221 #define MATCOLORINGNATURAL "natural"
1222 #define MATCOLORINGSL      "sl"
1223 #define MATCOLORINGLF      "lf"
1224 #define MATCOLORINGID      "id"
1225 
1226 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1227 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1228 
1229 /*MC
1230    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1231                                matrix package.
1232 
1233    Synopsis:
1234    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1235 
1236    Not Collective
1237 
1238    Input Parameters:
1239 +  sname - name of Coloring (for example MATCOLORINGSL)
1240 .  path - location of library where creation routine is
1241 .  name - name of function that creates the Coloring type, a string
1242 -  function - function pointer that creates the coloring
1243 
1244    Level: developer
1245 
1246    If dynamic libraries are used, then the fourth input argument (function)
1247    is ignored.
1248 
1249    Sample usage:
1250 .vb
1251    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1252                "MyColor",MyColor);
1253 .ve
1254 
1255    Then, your coloring can be chosen with the procedural interface via
1256 $     MatColoringSetType(part,"my_color")
1257    or at runtime via the option
1258 $     -mat_coloring_type my_color
1259 
1260    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1261 
1262 .keywords: matrix, Coloring, register
1263 
1264 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1265 M*/
/* With dynamic libraries the creation-function pointer (argument d) is ignored
   and 0 is passed instead; the routine named by argument c is loaded from the
   library at runtime (see the MatColoringRegisterDynamic manual page above). */
1266 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1267 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1268 #else
1269 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1270 #endif
1271 
1272 extern PetscBool  MatColoringRegisterAllCalled;
1273 
1274 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1275 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1276 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1277 
1278 /*S
1279      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1280         and coloring
1281 
1282    Level: beginner
1283 
1284   Concepts: coloring, sparse Jacobian, finite differences
1285 
1286 .seealso:  MatFDColoringCreate()
1287 S*/
1288 typedef struct _p_MatFDColoring* MatFDColoring;
1289 
1290 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1291 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1292 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1293 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1294 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1295 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1296 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1297 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1298 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1299 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1300 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1301 /*
1302     These routines are for partitioning matrices: currently used only
1303   for adjacency matrix, MatCreateMPIAdj().
1304 */
1305 
1306 /*S
1307      MatPartitioning - Object for managing the partitioning of a matrix or graph
1308 
1309    Level: beginner
1310 
1311   Concepts: partitioning
1312 
1313 .seealso:  MatPartitioningCreate(), MatPartitioningType
1314 S*/
1315 typedef struct _p_MatPartitioning* MatPartitioning;
1316 
1317 /*J
1318     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1319        with an optional dynamic library name, for example
1320        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1321 
1322    Level: beginner
1323 
1324 .seealso: MatPartitioningCreate(), MatPartitioning
1325 J*/
1326 #define MatPartitioningType char*
1327 #define MATPARTITIONINGCURRENT  "current"
1328 #define MATPARTITIONINGSQUARE   "square"
1329 #define MATPARTITIONINGPARMETIS "parmetis"
1330 #define MATPARTITIONINGCHACO    "chaco"
1331 #define MATPARTITIONINGPARTY    "party"
1332 #define MATPARTITIONINGPTSCOTCH "ptscotch"
1333 
1334 
1335 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1336 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1337 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1338 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1339 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1340 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1341 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1342 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1343 
1344 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1345 
1346 /*MC
1347    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1348    matrix package.
1349 
1350    Synopsis:
1351    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1352 
1353    Not Collective
1354 
1355    Input Parameters:
1356 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1357 .  path - location of library where creation routine is
1358 .  name - name of function that creates the partitioning type, a string
1359 -  function - function pointer that creates the partitioning type
1360 
1361    Level: developer
1362 
1363    If dynamic libraries are used, then the fourth input argument (function)
1364    is ignored.
1365 
1366    Sample usage:
1367 .vb
1368    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1369                "MyPartCreate",MyPartCreate);
1370 .ve
1371 
1372    Then, your partitioner can be chosen with the procedural interface via
1373 $     MatPartitioningSetType(part,"my_part")
1374    or at runtime via the option
1375 $     -mat_partitioning_type my_part
1376 
1377    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1378 
1379 .keywords: matrix, partitioning, register
1380 
1381 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1382 M*/
/* With dynamic libraries the creation-function pointer (argument d) is ignored
   and 0 is passed instead; the routine named by argument c is loaded from the
   library at runtime (see the MatPartitioningRegisterDynamic manual page above). */
1383 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1384 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1385 #else
1386 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1387 #endif
1388 
1389 extern PetscBool  MatPartitioningRegisterAllCalled;
1390 
1391 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1392 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1393 
1394 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1395 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1396 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1397 
1398 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1399 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1400 
/* Option values for the Chaco partitioner, set/queried via the
   MatPartitioningChaco{Set,Get}* routines below.  The *Types[] string arrays
   hold the corresponding names.  NOTE(review): the global-type values skip 3;
   presumably the numbering mirrors Chaco's own option codes -- TODO confirm
   against the Chaco interface implementation. */
1401 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
1402 extern const char *MPChacoGlobalTypes[];
1403 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
1404 extern const char *MPChacoLocalTypes[];
1405 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
1406 extern const char *MPChacoEigenTypes[];
1407 
1408 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
1409 extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
1410 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
1411 extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
1412 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1413 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1414 extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
1415 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
1416 extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
1417 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
1418 extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1419 
1420 #define MP_PARTY_OPT "opt"
1421 #define MP_PARTY_LIN "lin"
1422 #define MP_PARTY_SCA "sca"
1423 #define MP_PARTY_RAN "ran"
1424 #define MP_PARTY_GBF "gbf"
1425 #define MP_PARTY_GCF "gcf"
1426 #define MP_PARTY_BUB "bub"
1427 #define MP_PARTY_DEF "def"
1428 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
1429 #define MP_PARTY_HELPFUL_SETS "hs"
1430 #define MP_PARTY_KERNIGHAN_LIN "kl"
1431 #define MP_PARTY_NONE "no"
1432 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
1433 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1434 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
1435 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1436 
/* Strategy selector for the PT-Scotch partitioner, used by the
   MatPartitioningPTScotch{Set,Get}Strategy routines below;
   MPPTScotchStrategyTypes[] holds the corresponding names. */
1437 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
1438 extern const char *MPPTScotchStrategyTypes[];
1439 
1440 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
1441 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
1442 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
1443 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);
1444 
1445 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1446 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1447 
1448 /*
1449     If you add entries here you must also add them to finclude/petscmat.h
1450 */
/* Indices into a Mat's operations (function) table.  Used with
   MatHasOperation(), MatShellSetOperation() and MatShellGetOperation().
   Values are part of the binary interface: do not renumber, and any
   additions/changes here must also be made in include/finclude/petscmat.h. */
1451 typedef enum { MATOP_SET_VALUES=0,
1452                MATOP_GET_ROW=1,
1453                MATOP_RESTORE_ROW=2,
1454                MATOP_MULT=3,
1455                MATOP_MULT_ADD=4,
1456                MATOP_MULT_TRANSPOSE=5,
1457                MATOP_MULT_TRANSPOSE_ADD=6,
1458                MATOP_SOLVE=7,
1459                MATOP_SOLVE_ADD=8,
1460                MATOP_SOLVE_TRANSPOSE=9,
1461                MATOP_SOLVE_TRANSPOSE_ADD=10,
1462                MATOP_LUFACTOR=11,
1463                MATOP_CHOLESKYFACTOR=12,
1464                MATOP_SOR=13,
1465                MATOP_TRANSPOSE=14,
1466                MATOP_GETINFO=15,
1467                MATOP_EQUAL=16,
1468                MATOP_GET_DIAGONAL=17,
1469                MATOP_DIAGONAL_SCALE=18,
1470                MATOP_NORM=19,
1471                MATOP_ASSEMBLY_BEGIN=20,
1472                MATOP_ASSEMBLY_END=21,
1473                MATOP_SET_OPTION=22,
1474                MATOP_ZERO_ENTRIES=23,
1475                MATOP_ZERO_ROWS=24,
1476                MATOP_LUFACTOR_SYMBOLIC=25,
1477                MATOP_LUFACTOR_NUMERIC=26,
1478                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1479                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1480                MATOP_SETUP_PREALLOCATION=29,
1481                MATOP_ILUFACTOR_SYMBOLIC=30,
1482                MATOP_ICCFACTOR_SYMBOLIC=31,
1483                MATOP_GET_ARRAY=32,
1484                MATOP_RESTORE_ARRAY=33,
1485                MATOP_DUPLICATE=34,
1486                MATOP_FORWARD_SOLVE=35,
1487                MATOP_BACKWARD_SOLVE=36,
1488                MATOP_ILUFACTOR=37,
1489                MATOP_ICCFACTOR=38,
1490                MATOP_AXPY=39,
1491                MATOP_GET_SUBMATRICES=40,
1492                MATOP_INCREASE_OVERLAP=41,
1493                MATOP_GET_VALUES=42,
1494                MATOP_COPY=43,
1495                MATOP_GET_ROW_MAX=44,
1496                MATOP_SCALE=45,
1497                MATOP_SHIFT=46,
1498                MATOP_DIAGONAL_SET=47,
1499                MATOP_ILUDT_FACTOR=48,
1500                MATOP_SET_BLOCK_SIZE=49,
1501                MATOP_GET_ROW_IJ=50,
1502                MATOP_RESTORE_ROW_IJ=51,
1503                MATOP_GET_COLUMN_IJ=52,
1504                MATOP_RESTORE_COLUMN_IJ=53,
1505                MATOP_FDCOLORING_CREATE=54,
1506                MATOP_COLORING_PATCH=55,
1507                MATOP_SET_UNFACTORED=56,
1508                MATOP_PERMUTE=57,
1509                MATOP_SET_VALUES_BLOCKED=58,
1510                MATOP_GET_SUBMATRIX=59,
1511                MATOP_DESTROY=60,
1512                MATOP_VIEW=61,
1513                MATOP_CONVERT_FROM=62,
1514                MATOP_USE_SCALED_FORM=63,
1515                MATOP_SCALE_SYSTEM=64,
1516                MATOP_UNSCALE_SYSTEM=65,
1517                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1518                MATOP_SET_VALUES_LOCAL=67,
1519                MATOP_ZERO_ROWS_LOCAL=68,
1520                MATOP_GET_ROW_MAX_ABS=69,
1521                MATOP_GET_ROW_MIN_ABS=70,
1522                MATOP_CONVERT=71,
1523                MATOP_SET_COLORING=72,
1524                MATOP_SET_VALUES_ADIC=73,
1525                MATOP_SET_VALUES_ADIFOR=74,
1526                MATOP_FD_COLORING_APPLY=75,
1527                MATOP_SET_FROM_OPTIONS=76,
1528                MATOP_MULT_CON=77,
1529                MATOP_MULT_TRANSPOSE_CON=78,
1530                MATOP_PERMUTE_SPARSIFY=79,
1531                MATOP_MULT_MULTIPLE=80,
1532                MATOP_SOLVE_MULTIPLE=81,
1533                MATOP_GET_INERTIA=82,
1534                MATOP_LOAD=83,
1535                MATOP_IS_SYMMETRIC=84,
1536                MATOP_IS_HERMITIAN=85,
1537                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1538                MATOP_DUMMY=87,
1539                MATOP_GET_VECS=88,
1540                MATOP_MAT_MULT=89,
1541                MATOP_MAT_MULT_SYMBOLIC=90,
1542                MATOP_MAT_MULT_NUMERIC=91,
1543                MATOP_PTAP=92,
1544                MATOP_PTAP_SYMBOLIC=93,
1545                MATOP_PTAP_NUMERIC=94,
1546                MATOP_MAT_MULTTRANSPOSE=95,
1547                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1548                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1549                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1550                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1551                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1552                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1553                MATOP_CONJUGATE=102,
1554                MATOP_SET_SIZES=103,
1555                MATOP_SET_VALUES_ROW=104,
1556                MATOP_REAL_PART=105,
1557                MATOP_IMAG_PART=106,
1558                MATOP_GET_ROW_UTRIANGULAR=107,
1559                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1560                MATOP_MATSOLVE=109,
1561                MATOP_GET_REDUNDANTMATRIX=110,
1562                MATOP_GET_ROW_MIN=111,
1563                MATOP_GET_COLUMN_VEC=112,
1564                MATOP_MISSING_DIAGONAL=113,
1565                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1566                MATOP_CREATE=115,
1567                MATOP_GET_GHOSTS=116,
1568                MATOP_GET_LOCALSUBMATRIX=117,
1569                MATOP_RESTORE_LOCALSUBMATRIX=118,
1570                MATOP_MULT_DIAGONAL_BLOCK=119,
1571                MATOP_HERMITIANTRANSPOSE=120,
1572                MATOP_MULTHERMITIANTRANSPOSE=121,
1573                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1574                MATOP_GETMULTIPROCBLOCK=123,
1575                MATOP_GETCOLUMNNORMS=125, /* note: value 124 is not defined here */
1576 	       MATOP_GET_SUBMATRICES_PARALLEL=128, /* note: values 126-127 are not defined here */
1577                MATOP_SET_VALUES_BATCH=129
1578              } MatOperation;
1579 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1580 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1581 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1582 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1583 
1584 /*
1585    Codes for matrices stored on disk. By default they are
1586    stored in a universal format. By changing the format with
1587    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1588    be stored in a way natural for the matrix, for example dense matrices
1589    would be stored as dense. Matrices stored this way may only be
1590    read into matrices of the same type.
1591 */
1592 #define MATRIX_BINARY_FORMAT_DENSE -1
1593 
1594 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1595 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1596 
1597 /*S
1598      MatNullSpace - Object that removes a null space from a vector, i.e.
1599          orthogonalizes the vector to a subspace
1600 
1601    Level: advanced
1602 
1603   Concepts: matrix; linear operator, null space
1604 
1605   Users manual sections:
1606 .   sec_singular
1607 
1608 .seealso:  MatNullSpaceCreate()
1609 S*/
1610 typedef struct _p_MatNullSpace* MatNullSpace;
1611 
1612 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1613 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1614 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1615 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1616 extern PetscErrorCode  MatSetNullSpace(Mat,MatNullSpace);
1617 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1618 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1619 
1620 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1621 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1622 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1623 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1624 
1625 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1626 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1627 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1628 
1629 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1630 
1631 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1632 
1633 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1634 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1635 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1636 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1637 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1638 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1639 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1640 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1641 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1642 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1643 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1644 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1645 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1646 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1647 
1648 /*S
1649     MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1650               Jacobian vector products
1651 
1652     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1653 
1654            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1655 
1656     Level: developer
1657 
1658 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1659 S*/
1660 typedef struct _p_MatMFFD* MatMFFD;
1661 
1662 /*J
1663     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1664 
1665    Level: beginner
1666 
1667 .seealso: MatMFFDSetType(), MatMFFDRegister()
1668 J*/
1669 #define MatMFFDType char*
1670 #define MATMFFD_DS  "ds"
1671 #define MATMFFD_WP  "wp"
1672 
1673 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1674 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1675 
1676 /*MC
1677    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1678 
1679    Synopsis:
1680    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1681 
1682    Not Collective
1683 
1684    Input Parameters:
1685 +  name_solver - name of a new user-defined compute-h module
1686 .  path - path (either absolute or relative) the library containing this solver
1687 .  name_create - name of routine to create method context
1688 -  routine_create - routine to create method context
1689 
1690    Level: developer
1691 
1692    Notes:
1693    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1694 
1695    If dynamic libraries are used, then the fourth input argument (routine_create)
1696    is ignored.
1697 
1698    Sample usage:
1699 .vb
1700    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1701                "MyHCreate",MyHCreate);
1702 .ve
1703 
1704    Then, your solver can be chosen with the procedural interface via
1705 $     MatMFFDSetType(mfctx,"my_h")
1706    or at runtime via the option
1707 $     -snes_mf_type my_h
1708 
1709 .keywords: MatMFFD, register
1710 
1711 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1712 M*/
/* With dynamic libraries the creation-function pointer (argument d) is ignored
   and 0 is passed instead; the routine named by argument c is loaded from the
   library at runtime (see the MatMFFDRegisterDynamic manual page above). */
1713 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1714 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1715 #else
1716 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1717 #endif
1718 
1719 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1720 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1721 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1722 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1723 
1724 
1725 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1726 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1727 
1728 /*
1729    PETSc interface to MUMPS
1730 */
1731 #ifdef PETSC_HAVE_MUMPS
1732 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1733 #endif
1734 
1735 /*
1736    PETSc interface to SUPERLU
1737 */
1738 #ifdef PETSC_HAVE_SUPERLU
1739 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1740 #endif
1741 
1742 #if defined(PETSC_HAVE_CUSP)
1743 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1744 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1745 #endif
1746 
1747 /*
1748    PETSc interface to FFTW
1749 */
1750 #if defined(PETSC_HAVE_FFTW)
1751 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1752 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1753 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1754 #endif
1755 
1756 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1757 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1758 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1759 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1760 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1761 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1762 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1763 
1764 /*
1765  MatIJ:
1766  An unweighted directed pseudograph
1767  An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
1768  A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
1769  by mapping the indices to the indices connected to them by the (pseudo)graph edges
1770  */
1771 typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType; /* presumably selects local vs. global index numbering for the MatIJ routines below -- TODO(review) confirm against the MatIJ implementation */
1772 extern  PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
1773 extern  PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
1774 extern  PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
1775 extern  PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
1776 extern  PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
1777 extern  PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
1778 extern  PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
1779 extern  PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
1780 extern  PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
1781 extern  PetscErrorCode MatIJGetSupport(Mat,  PetscInt *, PetscInt **);
1782 extern  PetscErrorCode MatIJGetSupportIS(Mat, IS *);
1783 extern  PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
1784 extern  PetscErrorCode MatIJGetImageIS(Mat, IS *);
1785 extern  PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
1786 extern  PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);
1787 
1788 extern  PetscErrorCode MatIJBinRenumber(Mat, Mat*);
1789 
1790 extern  PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[], MatIJIndexType,PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]);
1791 extern  PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]);
1792 extern  PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],MatIJIndexType,PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]);
1793 
1794 PETSC_EXTERN_CXX_END
1795 #endif
1796