xref: /petsc/include/petscmat.h (revision ab824b7887c7ada6d48dfd202a36a066073b50ec)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*J
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 J*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 
85 /*J
86     MatSolverPackage - String with the name of a PETSc matrix solver type.
87 
88     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
89        SuperLU or SuperLU_Dist etc.
90 
91 
92    Level: beginner
93 
94 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
95 J*/
96 #define MatSolverPackage char*
97 #define MATSOLVERSPOOLES      "spooles"
98 #define MATSOLVERSUPERLU      "superlu"
99 #define MATSOLVERSUPERLU_DIST "superlu_dist"
100 #define MATSOLVERUMFPACK      "umfpack"
101 #define MATSOLVERCHOLMOD      "cholmod"
102 #define MATSOLVERESSL         "essl"
103 #define MATSOLVERLUSOL        "lusol"
104 #define MATSOLVERMUMPS        "mumps"
105 #define MATSOLVERPASTIX       "pastix"
106 #define MATSOLVERMATLAB       "matlab"
107 #define MATSOLVERPETSC        "petsc"
108 #define MATSOLVERPLAPACK      "plapack"
109 #define MATSOLVERBAS          "bas"
110 
111 #define MATSOLVERBSTRM        "bstrm"
112 #define MATSOLVERSBSTRM       "sbstrm"
113 
114 /*E
115     MatFactorType - indicates what type of factorization is requested
116 
117     Level: beginner
118 
119    Any additions/changes here MUST also be made in include/finclude/petscmat.h
120 
121 .seealso: MatSolverPackage, MatGetFactor()
122 E*/
/* MAT_FACTOR_NONE = not a factored matrix; LU/CHOLESKY = complete factorizations; ILU/ICC/ILUDT = incomplete variants (ILUDT presumably drop-tolerance ILU -- confirm) */
123 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
124 extern const char *const MatFactorTypes[];
125 
126 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
127 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
128 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
129 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
130 
131 /* Logging support */
132 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
133 extern PetscClassId  MAT_CLASSID;
134 extern PetscClassId  MAT_FDCOLORING_CLASSID;
135 extern PetscClassId  MAT_PARTITIONING_CLASSID;
136 extern PetscClassId  MAT_NULLSPACE_CLASSID;
137 extern PetscClassId  MATMFFD_CLASSID;
138 
139 /*E
140     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
141      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate
142      that the input matrix is to be replaced with the converted matrix.
143 
144     Level: beginner
145 
146    Any additions/changes here MUST also be made in include/finclude/petscmat.h
147 
148 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
149 E*/
/* MAT_INITIAL_MATRIX: allocate a new matrix; MAT_REUSE_MATRIX: overwrite the matrix obtained from a previous call (see E-block above); MAT_IGNORE_MATRIX: presumably skip producing this matrix -- confirm against MatGetSubMatrices() */
150 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
151 
152 /*E
153     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
154      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
155 
156     Level: beginner
157 
158 .seealso: MatGetSeqNonzeroStructure()
159 E*/
/* whether an extracted (sub)matrix carries numerical values or only its nonzero structure */
160 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
161 
162 extern PetscErrorCode  MatInitializePackage(const char[]);
163 
164 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
165 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
166 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
167 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
168 extern PetscErrorCode  MatSetType(Mat,const MatType);
169 extern PetscErrorCode  MatSetFromOptions(Mat);
170 extern PetscErrorCode  MatSetUpPreallocation(Mat);
171 extern PetscErrorCode  MatRegisterAll(const char[]);
172 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
173 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
174 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
175 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
176 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
177 
178 /*MC
179    MatRegisterDynamic - Adds a new matrix type
180 
181    Synopsis:
182    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
183 
184    Not Collective
185 
186    Input Parameters:
187 +  name - name of a new user-defined matrix type
187 .  path - path (either absolute or relative) to the library containing this solver
189 .  name_create - name of routine to create method context
190 -  routine_create - routine to create method context
191 
192    Notes:
193    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
194 
195    If dynamic libraries are used, then the fourth input argument (routine_create)
196    is ignored.
197 
198    Sample usage:
199 .vb
200    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
201                "MyMatCreate",MyMatCreate);
202 .ve
203 
204    Then, your solver can be chosen with the procedural interface via
205 $     MatSetType(Mat,"my_mat")
206    or at runtime via the option
207 $     -mat_type my_mat
208 
209    Level: advanced
210 
211    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
212          If your function is not being put into a shared library then use MatRegister() instead
213 
214 .keywords: Mat, register
215 
216 .seealso: MatRegisterAll(), MatRegisterDestroy()
217 
218 M*/
219 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
220 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
221 #else
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
223 #endif
224 
225 extern PetscBool  MatRegisterAllCalled;
226 extern PetscFList MatList;
227 extern PetscFList MatColoringList;
228 extern PetscFList MatPartitioningList;
229 
230 /*E
231     MatStructure - Indicates if the matrix has the same nonzero structure
232 
233     Level: beginner
234 
235    Any additions/changes here MUST also be made in include/finclude/petscmat.h
236 
237 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
238 E*/
/* relation of one matrix's nonzero pattern to another's; SAME_PRECONDITIONER presumably additionally indicates the preconditioning matrix is unchanged -- confirm against KSPSetOperators() */
239 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
240 
241 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
242 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
243 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
244 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
248 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
251 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
252 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
256 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
259 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
260 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
266 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
267 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
268 
269 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
270 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
274 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
277 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
278 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
282 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
285 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
292 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
293 
294 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
295 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
296 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
303 
304 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
305 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
309 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
312 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
316 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
319 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
320 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
321 
322 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
323 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
324 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
325 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
326 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
327 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
328 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
329 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
330 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
331 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
332 
333 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
334 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
335 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 
338 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
339 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
340 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
341 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
342 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
343 extern PetscErrorCode  MatCompositeMerge(Mat);
344 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/* how a MATCOMPOSITE applies its member matrices: as a sum (additive) or as a product (multiplicative) */
345 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
346 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
347 
348 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
349 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
350 
351 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
352 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
353 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
354 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
355 
356 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
357 
358 extern PetscErrorCode  MatSetUp(Mat);
359 extern PetscErrorCode  MatDestroy(Mat*);
360 
361 extern PetscErrorCode  MatConjugate(Mat);
362 extern PetscErrorCode  MatRealPart(Mat);
363 extern PetscErrorCode  MatImaginaryPart(Mat);
364 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
365 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
366 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
367 
368 /* ------------------------------------------------------------*/
369 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
370 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
371 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
372 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
373 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
374 
375 /*S
376      MatStencil - Data structure (C struct) for storing information about a single row or
377         column of a matrix as index on an associated grid.
378 
379    Level: beginner
380 
381   Concepts: matrix; linear operator
382 
383 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
384 S*/
385 typedef struct {
386   PetscInt k,j,i,c;  /* logical grid indices k,j,i plus c -- presumably c is the component (degree-of-freedom) index at the grid point; confirm against MatSetValuesStencil() */
387 } MatStencil;
388 
389 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
390 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
391 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
392 
393 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
394 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
395 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
396 
397 /*E
398     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
399      to continue to add values to it
400 
401     Level: beginner
402 
403 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
404 E*/
/* NOTE(review): values are pinned explicitly (FLUSH=1, FINAL=0); presumably relied upon elsewhere (e.g. include/finclude/petscmat.h) -- do not renumber without checking */
405 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
406 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
407 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
408 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
409 
410 
411 
412 /*E
413     MatOption - Options that may be set for a matrix and its behavior or storage
414 
415     Level: beginner
416 
417    Any additions/changes here MUST also be made in include/finclude/petscmat.h
418 
419 .seealso: MatSetOption()
420 E*/
421 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
422               MAT_SYMMETRIC,
423               MAT_STRUCTURALLY_SYMMETRIC,
424               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
425               MAT_NEW_NONZERO_LOCATION_ERR,
426               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
427               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
428               MAT_USE_INODES,
429               MAT_HERMITIAN,
430               MAT_SYMMETRY_ETERNAL,
431               MAT_CHECK_COMPRESSED_ROW,
432               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
433               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
434               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
435               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS must remain last: it counts the entries (presumably sizes the MatOptions[] name table declared below) */
436 extern const char *MatOptions[];
437 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
438 extern PetscErrorCode  MatGetType(Mat,const MatType*);
439 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
440 
441 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
442 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
443 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
444 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
445 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
446 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
447 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
449 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
450 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
451 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
452 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
453 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
454 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
455 
456 
457 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
458 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
459 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
460 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
461 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
462 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
463 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
464 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
465 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
466 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
467 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
468 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
469 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
470 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
471 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
472 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
473 
474 /*E
475     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
476   its numerical values copied over or just its nonzero structure.
477 
478     Level: beginner
479 
480    Any additions/changes here MUST also be made in include/finclude/petscmat.h
481 
482 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
483 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
484 $                               have several matrices with the same nonzero pattern.
485 
486 .seealso: MatDuplicate()
487 E*/
/* per the E-block above, MAT_SHARE_NONZERO_PATTERN shares the i/j arrays with the original matrix and also implies MAT_DO_NOT_COPY_VALUES */
488 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
489 
490 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
491 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
492 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
493 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
494 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
495 
496 
497 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
498 extern PetscErrorCode  MatView(Mat,PetscViewer);
499 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
500 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
501 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
502 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
503 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
504 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
505 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
506 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
507 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
508 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
509 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
510 
511 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
512 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
513 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
514 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
515 
516 /*S
517      MatInfo - Context of matrix information, used with MatGetInfo()
518 
519    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
520 
521    Level: intermediate
522 
523   Concepts: matrix^nonzero information
524 
525 .seealso:  MatGetInfo(), MatInfoType
526 S*/
typedef struct {
  /* All fields are PetscLogDouble (not integer types) so that from Fortran the struct
     can be treated as a double precision array of dimension MAT_INFO_SIZE (see the
     manual page above). */
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, allocated but not needed */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
536 
537 /*E
538     MatInfoType - Indicates if you want information about the local part of the matrix,
539      the entire parallel matrix or the maximum over all the local parts.
540 
541     Level: beginner
542 
543    Any additions/changes here MUST also be made in include/finclude/petscmat.h
544 
545 .seealso: MatGetInfo(), MatInfo
546 E*/
547 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
548 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
549 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
550 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
551 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
552 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
553 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
554 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
555 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
556 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
557 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
558 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
559 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
560 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
561 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
562 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
563 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
564 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
565 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
566 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
567 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
568 
569 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
570 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
571 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
572 extern PetscErrorCode  MatZeroEntries(Mat);
573 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
574 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
575 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
576 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
577 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
578 
579 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
580 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
581 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
582 
583 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
584 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
585 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
586 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
587 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
588 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
589 
590 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
591 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
592 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
593 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
594 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
595 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
596 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
597 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
598 
599 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
600 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
601 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
602 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
603 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
604 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
605 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
606 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
607 #if defined (PETSC_USE_CTABLE)
608 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
609 #else
610 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
611 #endif
612 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
613 
614 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
615 
616 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
617 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
618 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
619 
620 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
621 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
622 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
623 
624 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
625 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
626 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
627 
628 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
629 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
630 
631 extern PetscErrorCode  MatScale(Mat,PetscScalar);
632 extern PetscErrorCode  MatShift(Mat,PetscScalar);
633 
634 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
635 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
636 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
637 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
638 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
639 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
640 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
641 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
642 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
643 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
644 
645 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
646 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
647 
648 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
649 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
650 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
651 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
652 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
653 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
654 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
655 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
656 
657 /*MC
658    MatSetValue - Set a single entry into a matrix.
659 
660    Not collective
661 
662    Input Parameters:
663 +  m - the matrix
664 .  row - the row location of the entry
665 .  col - the column location of the entry
666 .  value - the value to insert
667 -  mode - either INSERT_VALUES or ADD_VALUES
668 
669    Notes:
670    For efficiency one should use MatSetValues() and set several or many
671    values simultaneously if possible.
672 
673    Level: beginner
674 
675 .seealso: MatSetValues(), MatSetValueLocal()
676 M*/
677 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
678 
679 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
680 
681 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
682 
683 /*MC
684    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
685        row in a matrix providing the data that one can use to correctly preallocate the matrix.
686 
687    Synopsis:
688    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
689 
690    Collective on MPI_Comm
691 
692    Input Parameters:
693 +  comm - the communicator that will share the eventually allocated matrix
694 .  nrows - the number of LOCAL rows in the matrix
695 -  ncols - the number of LOCAL columns in the matrix
696 
697    Output Parameters:
698 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
700 
701 
702    Level: intermediate
703 
704    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
706 
707    Do not malloc or free dnz and onz, that is handled internally by these routines
708 
709    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
710 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
712 
713   Concepts: preallocation^Matrix
714 
715 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
716           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
717 M*/
/* NOTE(review): expands to `0;` followed by an OPEN `{` scope; the scope is closed by
   MatPreallocateFinalize().  The locals _4_ierr, __nrows, __rstart, __start and __end
   declared here are referenced by the other MatPreallocate* macros used in between. */
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); /* one dnz and one onz counter per local row */ \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp; /* inclusive prefix sum of ncols: [__start,__end) is this process's column range */\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; /* __rstart = first global row owned by this process */
726 
727 /*MC
728    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
729        row in a matrix providing the data that one can use to correctly preallocate the matrix.
730 
731    Synopsis:
732    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
733 
734    Collective on MPI_Comm
735 
736    Input Parameters:
737 +  comm - the communicator that will share the eventually allocated matrix
738 .  nrows - the number of LOCAL rows in the matrix
739 -  ncols - the number of LOCAL columns in the matrix
740 
741    Output Parameters:
742 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
744 
745 
746    Level: intermediate
747 
748    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
750 
751    Do not malloc or free dnz and onz, that is handled internally by these routines
752 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
754 
755   Concepts: preallocation^Matrix
756 
757 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
758           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
759 M*/
/* NOTE(review): symmetric variant of MatPreallocateInitialize(); opens a `{` scope
   closed by MatPreallocateFinalize().  Declares __end and __rstart but (unlike the
   non-symmetric form) no __start, since MatPreallocateSymmetricSet() only tests
   columns against __end. */
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); /* one dnz and one onz counter per local row */ \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); /* __end = one past this process's last global column */\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; /* __rstart = first global row owned by this process */
768 
769 /*MC
770    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
771        inserted using a local number of the rows and columns
772 
773    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
775 
776    Not Collective
777 
778    Input Parameters:
779 +  map - the row mapping from local numbering to global numbering
780 .  nrows - the number of rows indicated
781 .  rows - the indices of the rows
782 .  cmap - the column mapping from local to global numbering
783 .  ncols - the number of columns in the matrix
784 .  cols - the columns indicated
785 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
787 
788 
789    Level: intermediate
790 
791    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
793 
794    Do not malloc or free dnz and onz, that is handled internally by these routines
795 
796   Concepts: preallocation^Matrix
797 
798 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
799           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
800 M*/
/* Translates local row/column indices to global IN PLACE (rows and cols are
   overwritten by ISLocalToGlobalMappingApply()) and records the resulting locations
   with MatPreallocateSet().  Must appear between MatPreallocateInitialize() and
   MatPreallocateFinalize(), which supply _4_ierr and the ownership-range variables.
   All macro arguments are parenthesized so that expressions may be passed safely. */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply((rmap),(nrows),(rows),(rows));CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply((cmap),(ncols),(cols),(cols));CHKERRQ(_4_ierr);\
  for (__l=0;__l<(nrows);__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],(ncols),(cols),(dnz),(onz));CHKERRQ(_4_ierr);\
  }\
}
810 
811 /*MC
812    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
813        inserted using a local number of the rows and columns
814 
815    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
817 
818    Not Collective
819 
820    Input Parameters:
821 +  map - the mapping between local numbering and global numbering
822 .  nrows - the number of rows indicated
823 .  rows - the indices of the rows
824 .  ncols - the number of columns in the matrix
825 .  cols - the columns indicated
826 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
828 
829 
830    Level: intermediate
831 
832    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
834 
835    Do not malloc or free dnz and onz that is handled internally by these routines
836 
837   Concepts: preallocation^Matrix
838 
839 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
840           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
841 M*/
/* Symmetric analogue of MatPreallocateSetLocal(): translates local row and column
   indices to global IN PLACE using the single mapping `map`, then records the
   locations with MatPreallocateSymmetricSet().  Must appear between
   MatPreallocateSymmetricInitialize() and MatPreallocateFinalize().
   All macro arguments are parenthesized so that expressions may be passed safely. */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply((map),(nrows),(rows),(rows));CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply((map),(ncols),(cols),(cols));CHKERRQ(_4_ierr);\
  for (__l=0;__l<(nrows);__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],(ncols),(cols),(dnz),(onz));CHKERRQ(_4_ierr);\
  }\
}
851 
852 /*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns
855 
856    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
858 
859    Not Collective
860 
861    Input Parameters:
862 +  row - the row
863 .  ncols - the number of columns in the matrix
864 -  cols - the columns indicated
865 
866    Output Parameters:
867 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
869 
870 
871    Level: intermediate
872 
873    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
875 
876    Do not malloc or free dnz and onz that is handled internally by these routines
877 
878    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
879 
880   Concepts: preallocation^Matrix
881 
882 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
883           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
884 M*/
/* Counts one nonzero per listed column of the given GLOBAL row into either the
   diagonal-block counter dnz (column inside [__start,__end)) or the off-diagonal
   counter onz.  Uses __rstart/__nrows/__start/__end declared by
   MatPreallocateInitialize(); errors if `row` is not owned by this process.
   All macro arguments are parenthesized so that expressions may be passed safely. */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if ((row) < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if ((row) >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else (dnz)[(row) - __rstart]++;\
  }\
}
894 
895 /*MC
896    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
897        inserted using a local number of the rows and columns
898 
899    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row being preallocated (global numbering)
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
911 
912 
913    Level: intermediate
914 
915    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
917 
918    Do not malloc or free dnz and onz that is handled internally by these routines
919 
920    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
921 
922   Concepts: preallocation^Matrix
923 
924 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
925           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
926 M*/
/* Symmetric variant of MatPreallocateSet(): only columns in the upper triangle
   (column >= row) are counted; columns at or past __end (off-process) go to onz,
   upper-triangular on-process columns to dnz.  Uses __rstart/__end declared by
   MatPreallocateSymmetricInitialize().  Arguments are parenthesized (the original
   left `cols`/`row` bare, inconsistently with MatPreallocateSet) so that
   expressions may be passed safely. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<(nc); __i++) {\
    if ((cols)[__i] >= __end) (onz)[(row) - __rstart]++; \
    else if ((cols)[__i] >= (row)) (dnz)[(row) - __rstart]++;\
  }\
}
934 
935 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
937 
938    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
940 
941    Not Collective
942 
943    Input Parameters:
944 .  A - matrix
945 .  row - row where values exist (must be local to this process)
946 .  ncols - number of columns
947 .  cols - columns with nonzeros
948 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
950 
951 
952    Level: intermediate
953 
954    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
956 
957    Do not malloc or free dnz and onz that is handled internally by these routines
958 
959    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
960 
961   Concepts: preallocation^Matrix
962 
963 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
964           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
965 M*/
/* NOTE(review): unlike the other MatPreallocate* macros, this one uses a variable
   named `ierr`, which the CALLER must have in scope, rather than the internal
   _4_ierr; `row` must also be an addressable lvalue because its address is taken
   for MatSetValues(). */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
967 
968 
969 /*MC
970    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
971        row in a matrix providing the data that one can use to correctly preallocate the matrix.
972 
973    Synopsis:
974    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
975 
976    Collective on MPI_Comm
977 
978    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
981 
982 
983    Level: intermediate
984 
985    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
987 
988    Do not malloc or free dnz and onz that is handled internally by these routines
989 
990    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
991 
992   Concepts: preallocation^Matrix
993 
994 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
995           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
996 M*/
/* Frees the dnz/onz counter arrays and closes the `{` scope opened by
   MatPreallocateInitialize() or MatPreallocateSymmetricInitialize(). */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
998 
999 
1000 
1001 /* Routines unique to particular data structures */
1002 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1003 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1004 
1005 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1006 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1007 
1008 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1009 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1010 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1011 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1012 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1013 
1014 #define MAT_SKIP_ALLOCATION -4
1015 
1016 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1017 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1018 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1019 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1020 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1021 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1022 
1023 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1024 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1025 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1026 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1027 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1028 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1029 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1030 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1031 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1032 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1033 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1034 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1035 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1036 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1037 
1038 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1039 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1040 
1041 extern PetscErrorCode  MatStoreValues(Mat);
1042 extern PetscErrorCode  MatRetrieveValues(Mat);
1043 
1044 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1045 
1046 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1047 /*
1048   These routines are not usually accessed directly, rather solving is
1049   done through the KSP and PC interfaces.
1050 */
1051 
1052 /*J
1053     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1054        with an optional dynamic library name, for example
1055        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1056 
1057    Level: beginner
1058 
1059    Cannot use const because the PC objects manipulate the string
1060 
1061 .seealso: MatGetOrdering()
1062 J*/
1063 #define MatOrderingType char*
1064 #define MATORDERINGNATURAL     "natural"
1065 #define MATORDERINGND          "nd"
1066 #define MATORDERING1WD         "1wd"
1067 #define MATORDERINGRCM         "rcm"
1068 #define MATORDERINGQMD         "qmd"
1069 #define MATORDERINGROWLENGTH   "rowlength"
1070 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1071 
1072 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1073 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1074 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1075 
1076 /*MC
1077    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1078 
1079    Synopsis:
1080    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1081 
1082    Not Collective
1083 
1084    Input Parameters:
1085 +  sname - name of ordering (for example MATORDERINGND)
1086 .  path - location of library where creation routine is
1087 .  name - name of function that creates the ordering type,a string
1088 -  function - function pointer that creates the ordering
1089 
1090    Level: developer
1091 
1092    If dynamic libraries are used, then the fourth input argument (function)
1093    is ignored.
1094 
1095    Sample usage:
1096 .vb
1097    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1098                "MyOrder",MyOrder);
1099 .ve
1100 
1101    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1103    or at runtime via the option
1104 $     -pc_factor_mat_ordering_type my_order
1105 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1107 
1108 .keywords: matrix, ordering, register
1109 
1110 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1111 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up at runtime by name (c)
   in the given library (b), so the function-pointer argument d is dropped (see the
   manual page above). */
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1117 
1118 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1119 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1120 extern PetscBool  MatOrderingRegisterAllCalled;
1121 extern PetscFList MatOrderingList;
1122 
1123 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1124 
/*E
    MatFactorShiftType - Type of numeric shift added to the diagonal during factorization to prevent zero pivots

   Level: beginner

E*/
/* Kind of shift applied during factorization to avoid zero pivots (see the shifttype
   field of MatFactorInfo below) */
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[]; /* human-readable names -- presumably indexed by the enum values above; confirm in source */
1133 
1134 /*S
1135    MatFactorInfo - Data passed into the matrix factorization routines
1136 
1137    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1138 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1139 
1140    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1141 
1142       You can use MatFactorInfoInitialize() to set default values.
1143 
1144    Level: developer
1145 
1146 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1147           MatFactorInfoInitialize()
1148 
1149 S*/
/* Every field is a PetscReal (even flags and counts such as usedt, dtcount and levels)
   so that Fortran can treat the struct as a double precision array of length
   MAT_FACTORINFO_SIZE, as described in the manpage above. */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* if nonzero, use drop-tolerance (dt/dtcol/dtcount) based factorization -- NOTE(review): confirm against the factorization implementations */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots -- presumably holds a MatFactorShiftType value; confirm */
  PetscReal     shiftamount;     /* how large the shift is */
} MatFactorInfo;
1164 
1165 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1166 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1167 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1168 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1169 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1170 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1171 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1172 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1173 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1174 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1175 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1176 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1177 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1178 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1179 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1180 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1181 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1182 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1183 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1184 
1185 extern PetscErrorCode  MatSetUnfactored(Mat);
1186 
1187 /*E
1188     MatSORType - What type of (S)SOR to perform
1189 
1190     Level: beginner
1191 
1192    May be bitwise ORd together
1193 
1194    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1195 
1196    MatSORType may be bitwise ORd together, so do not change the numbers
1197 
1198 .seealso: MatSOR()
1199 E*/
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,      /* SYMMETRIC = FORWARD(1) | BACKWARD(2) */
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,              /* LOCAL_SYMMETRIC = LOCAL_FORWARD(4) | LOCAL_BACKWARD(8) */
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* distinct bits: values may be ORd together (see manpage above); do not renumber */
1204 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1205 
1206 /*
1207     These routines are for efficiently computing Jacobians via finite differences.
1208 */
1209 
1210 /*J
1211     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1212        with an optional dynamic library name, for example
1213        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1214 
1215    Level: beginner
1216 
1217 .seealso: MatGetColoring()
1218 J*/
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural" /* NOTE(review): presumably one color per column (natural ordering) -- confirm in MatGetColoring() docs */
#define MATCOLORINGSL      "sl"      /* NOTE(review): presumably smallest-last ordering -- confirm */
#define MATCOLORINGLF      "lf"      /* NOTE(review): presumably largest-first ordering -- confirm */
#define MATCOLORINGID      "id"      /* NOTE(review): presumably incidence-degree ordering -- confirm */
1224 
1225 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1226 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1227 
1228 /*MC
1229    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1230                                matrix package.
1231 
1232    Synopsis:
1233    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1234 
1235    Not Collective
1236 
1237    Input Parameters:
1238 +  sname - name of Coloring (for example MATCOLORINGSL)
1239 .  path - location of library where creation routine is
1240 .  name - name of function that creates the Coloring type, a string
1241 -  function - function pointer that creates the coloring
1242 
1243    Level: developer
1244 
1245    If dynamic libraries are used, then the fourth input argument (function)
1246    is ignored.
1247 
1248    Sample usage:
1249 .vb
1250    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1251                "MyColor",MyColor);
1252 .ve
1253 
   Then, your coloring can be chosen with the procedural interface via
1255 $     MatColoringSetType(part,"my_color")
1256    or at runtime via the option
1257 $     -mat_coloring_type my_color
1258 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1260 
1261 .keywords: matrix, Coloring, register
1262 
1263 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1264 M*/
/* With dynamic libraries the creation routine is located at runtime by name (argument c),
   so the function pointer d is ignored and 0 is registered instead; see the
   MatColoringRegisterDynamic manpage above. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif
1270 
1271 extern PetscBool  MatColoringRegisterAllCalled;
1272 
1273 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1274 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1275 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1276 
1277 /*S
1278      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1279         and coloring
1280 
1281    Level: beginner
1282 
1283   Concepts: coloring, sparse Jacobian, finite differences
1284 
1285 .seealso:  MatFDColoringCreate()
1286 S*/
1287 typedef struct _p_MatFDColoring* MatFDColoring;
1288 
1289 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1290 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1291 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1292 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1293 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1294 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1295 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1296 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1297 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1298 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1299 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1300 /*
1301     These routines are for partitioning matrices: currently used only
1302   for adjacency matrix, MatCreateMPIAdj().
1303 */
1304 
1305 /*S
1306      MatPartitioning - Object for managing the partitioning of a matrix or graph
1307 
1308    Level: beginner
1309 
1310   Concepts: partitioning
1311 
1312 .seealso:  MatPartitioningCreate(), MatPartitioningType
1313 S*/
1314 typedef struct _p_MatPartitioning* MatPartitioning;
1315 
1316 /*J
1317     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1318        with an optional dynamic library name, for example
1319        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1320 
1321    Level: beginner
1322 
1323 .seealso: MatPartitioningCreate(), MatPartitioning
1324 J*/
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"  /* NOTE(review): presumably keeps the existing distribution -- confirm */
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis" /* external package: ParMETIS */
#define MATPARTITIONINGCHACO    "chaco"    /* external package: Chaco */
#define MATPARTITIONINGPARTY    "party"    /* external package: Party */
#define MATPARTITIONINGPTSCOTCH "ptscotch" /* external package: PT-Scotch */
1331 #define MATPARTITIONINGPTSCOTCH "ptscotch"
1332 
1333 
1334 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1335 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1336 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1337 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1338 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1339 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1340 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1341 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1342 
1343 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1344 
1345 /*MC
1346    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1347    matrix package.
1348 
1349    Synopsis:
1350    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1351 
1352    Not Collective
1353 
1354    Input Parameters:
1355 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1356 .  path - location of library where creation routine is
1357 .  name - name of function that creates the partitioning type, a string
1358 -  function - function pointer that creates the partitioning type
1359 
1360    Level: developer
1361 
1362    If dynamic libraries are used, then the fourth input argument (function)
1363    is ignored.
1364 
1365    Sample usage:
1366 .vb
1367    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1368                "MyPartCreate",MyPartCreate);
1369 .ve
1370 
1371    Then, your partitioner can be chosen with the procedural interface via
1372 $     MatPartitioningSetType(part,"my_part")
1373    or at runtime via the option
1374 $     -mat_partitioning_type my_part
1375 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1377 
1378 .keywords: matrix, partitioning, register
1379 
1380 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1381 M*/
/* With dynamic libraries the creation routine is located at runtime by name (argument c),
   so the function pointer d is ignored and 0 is registered instead; see the
   MatPartitioningRegisterDynamic manpage above. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif
1387 
1388 extern PetscBool  MatPartitioningRegisterAllCalled;
1389 
1390 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1391 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1392 
1393 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1394 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1395 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1396 
1397 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1398 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1399 
/* Global partitioning methods of the Chaco package (value 3 is skipped) --
   NOTE(review): numbering presumably mirrors Chaco's own method codes; confirm against the Chaco manual */
typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
extern const char *MPChacoGlobalTypes[];
/* Local refinement methods of the Chaco package */
typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
extern const char *MPChacoLocalTypes[];
/* Eigensolver selection for Chaco; set with MatPartitioningChacoSetEigenSolver() below */
typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
extern const char *MPChacoEigenTypes[];
1406 
1407 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
1408 extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
1409 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
1410 extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
1411 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1412 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1413 extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
1414 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
1415 extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
1416 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
1417 extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1418 
/* Strategy name strings for the Party package -- presumably the values accepted by
   MatPartitioningPartySetGlobal() below; confirm in its implementation */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
/* Local refinement strategy strings -- presumably the values accepted by MatPartitioningPartySetLocal() */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1435 
/* Predefined strategy flags for PT-Scotch; set with MatPartitioningPTScotchSetStrategy() below */
typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
extern const char *MPPTScotchStrategyTypes[];
1438 
1439 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
1440 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
1441 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
1442 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);
1443 
1444 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1445 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1446 
1447 /*
1448     If you add entries here you must also add them to finclude/petscmat.h
1449 */
/* Identifiers for the operations a Mat may provide; used with MatHasOperation(),
   MatShellSetOperation() and MatShellGetOperation() below. Values are fixed
   (presumably indices into the Mat operations table -- confirm in matimpl.h) and
   must also be kept in sync with include/finclude/petscmat.h, so never renumber.
   Some numbers (124, 126, 127) are currently unused. */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               MATOP_GETCOLUMNNORMS=125,           /* 124 is currently unused */
               MATOP_GET_SUBMATRICES_PARALLEL=128, /* 126-127 are currently unused */
               MATOP_SET_VALUES_BATCH=129
             } MatOperation;
1578 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1579 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1580 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1581 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1582 
1583 /*
1584    Codes for matrices stored on disk. By default they are
1585    stored in a universal format. By changing the format with
1586    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1587    be stored in a way natural for the matrix, for example dense matrices
1588    would be stored as dense. Matrices stored this way may only be
1589    read into matrices of the same type.
1590 */
1591 #define MATRIX_BINARY_FORMAT_DENSE -1
1592 
1593 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1594 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1595 
1596 /*S
1597      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1599 
1600    Level: advanced
1601 
1602   Concepts: matrix; linear operator, null space
1603 
1604   Users manual sections:
1605 .   sec_singular
1606 
1607 .seealso:  MatNullSpaceCreate()
1608 S*/
1609 typedef struct _p_MatNullSpace* MatNullSpace;
1610 
1611 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1612 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1613 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1614 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1615 extern PetscErrorCode  MatSetNullSpace(Mat,MatNullSpace);
1616 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1617 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1618 
1619 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1620 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1621 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1622 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1623 
1624 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1625 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1626 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1627 
1628 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1629 
1630 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1631 
1632 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1633 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1634 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1635 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1636 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1637 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1638 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1639 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1640 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1641 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1642 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1643 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1644 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1645 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1646 
1647 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1649               Jacobian vector products
1650 
1651     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1652 
1653            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1654 
1655     Level: developer
1656 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1658 S*/
1659 typedef struct _p_MatMFFD* MatMFFD;
1660 
1661 /*J
1662     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1663 
1664    Level: beginner
1665 
1666 .seealso: MatMFFDSetType(), MatMFFDRegister()
1667 J*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"  /* NOTE(review): presumably the Dennis-Schnabel formula for h -- confirm in MATMFFD docs */
#define MATMFFD_WP  "wp"  /* NOTE(review): presumably the Walker-Pernice formula for h -- confirm in MATMFFD docs */
1671 
1672 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1673 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1674 
1675 /*MC
1676    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1677 
1678    Synopsis:
1679    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1680 
1681    Not Collective
1682 
1683    Input Parameters:
1684 +  name_solver - name of a new user-defined compute-h module
1685 .  path - path (either absolute or relative) the library containing this solver
1686 .  name_create - name of routine to create method context
1687 -  routine_create - routine to create method context
1688 
1689    Level: developer
1690 
1691    Notes:
1692    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1693 
1694    If dynamic libraries are used, then the fourth input argument (routine_create)
1695    is ignored.
1696 
1697    Sample usage:
1698 .vb
1699    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1700                "MyHCreate",MyHCreate);
1701 .ve
1702 
1703    Then, your solver can be chosen with the procedural interface via
1704 $     MatMFFDSetType(mfctx,"my_h")
1705    or at runtime via the option
1706 $     -snes_mf_type my_h
1707 
1708 .keywords: MatMFFD, register
1709 
1710 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1711 M*/
/* With dynamic libraries the creation routine is located at runtime by name (argument c),
   so the function pointer d is ignored and 0 is registered instead; see the
   MatMFFDRegisterDynamic manpage above. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif
1717 
1718 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1719 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1720 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1721 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1722 
1723 
1724 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1725 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1726 
1727 /*
1728    PETSc interface to MUMPS
1729 */
/* #if defined() form matches the other feature guards in this header (e.g. PETSC_HAVE_CUSP, PETSC_HAVE_FFTW) */
#if defined(PETSC_HAVE_MUMPS)
extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);  /* set a MUMPS integer control parameter -- NOTE(review): presumably ICNTL(icntl)=ival; confirm against the MUMPS users' guide */
#endif
1733 
1734 /*
1735    PETSc interface to SUPERLU
1736 */
/* #if defined() form matches the other feature guards in this header (e.g. PETSC_HAVE_CUSP, PETSC_HAVE_FFTW) */
#if defined(PETSC_HAVE_SUPERLU)
extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);  /* set the drop tolerance used by SuperLU's ILU factorization */
#endif
1740 
1741 #if defined(PETSC_HAVE_CUSP)
1742 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1743 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1744 #endif
1745 
1746 /*
1747    PETSc interface to FFTW
1748 */
1749 #if defined(PETSC_HAVE_FFTW)
1750 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1751 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1752 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1753 #endif
1754 
1755 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1756 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1757 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1758 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1759 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1760 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1761 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1762 
1763 PETSC_EXTERN_CXX_END
1764 #endif
1765