xref: /petsc/include/petscmat.h (revision 0d6fbc72e08a63e91ce3cd64123f05c91ae55da2)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;  /* opaque handle: struct _p_Mat is not defined in this public header */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
/* Built-in matrix format name strings.  Most formats come in three spellings:
   a seq* (single-process) variant, an mpi* (parallel) variant, and an
   unprefixed generic name -- presumably the generic name dispatches to the
   seq or mpi variant based on the communicator; confirm in MatSetType(). */
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQAIJPTHREAD   "seqaijpthread"
38 #define MATAIJPTHREAD      "aijpthread"
39 #define MATMPIAIJ          "mpiaij"
40 #define MATAIJCRL          "aijcrl"
41 #define MATSEQAIJCRL       "seqaijcrl"
42 #define MATMPIAIJCRL       "mpiaijcrl"
43 #define MATAIJCUSP         "aijcusp"
44 #define MATSEQAIJCUSP      "seqaijcusp"
45 #define MATMPIAIJCUSP      "mpiaijcusp"
46 #define MATAIJPERM         "aijperm"
47 #define MATSEQAIJPERM      "seqaijperm"
48 #define MATMPIAIJPERM      "mpiaijperm"
49 #define MATSHELL           "shell"
50 #define MATDENSE           "dense"
51 #define MATSEQDENSE        "seqdense"
52 #define MATMPIDENSE        "mpidense"
53 #define MATBAIJ            "baij"
54 #define MATSEQBAIJ         "seqbaij"
55 #define MATMPIBAIJ         "mpibaij"
56 #define MATMPIADJ          "mpiadj"
57 #define MATSBAIJ           "sbaij"
58 #define MATSEQSBAIJ        "seqsbaij"
59 #define MATMPISBAIJ        "mpisbaij"
60 
/* BSTRM / SBSTRM format family (see MatCreateSeqBSTRM() and friends below) */
61 #define MATSEQBSTRM        "seqbstrm"
62 #define MATMPIBSTRM        "mpibstrm"
63 #define MATBSTRM           "bstrm"
64 #define MATSEQSBSTRM       "seqsbstrm"
65 #define MATMPISBSTRM       "mpisbstrm"
66 #define MATSBSTRM          "sbstrm"
67 
/* special-purpose and wrapper matrix types */
68 #define MATDAAD            "daad"
69 #define MATMFFD            "mffd"
70 #define MATNORMAL          "normal"
71 #define MATLRC             "lrc"
72 #define MATSCATTER         "scatter"
73 #define MATBLOCKMAT        "blockmat"
74 #define MATCOMPOSITE       "composite"
75 #define MATFFT             "fft"
76 #define MATFFTW            "fftw"
77 #define MATSEQCUFFT        "seqcufft"
78 #define MATTRANSPOSEMAT    "transpose"
79 #define MATSCHURCOMPLEMENT "schurcomplement"
80 #define MATPYTHON          "python"
81 #define MATHYPRESTRUCT     "hyprestruct"
82 #define MATHYPRESSTRUCT    "hypresstruct"
83 #define MATSUBMATRIX       "submatrix"
84 #define MATLOCALREF        "localref"
85 #define MATNEST            "nest"
86 
87 /*E
88     MatSolverPackage - String with the name of a PETSc matrix solver type.
89 
90     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
91        SuperLU or SuperLU_Dist etc.
92 
93 
94    Level: beginner
95 
96 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
97 E*/
98 #define MatSolverPackage char*
/* Factorization-package name strings for MatGetFactor().  "petsc" is what
   PETSc itself provides (see the MatSolverPackage man page above); the other
   names presumably require the corresponding external package to be enabled
   in the PETSc build -- confirm per package. */
99 #define MATSOLVERSPOOLES      "spooles"
100 #define MATSOLVERSUPERLU      "superlu"
101 #define MATSOLVERSUPERLU_DIST "superlu_dist"
102 #define MATSOLVERUMFPACK      "umfpack"
103 #define MATSOLVERCHOLMOD      "cholmod"
104 #define MATSOLVERESSL         "essl"
105 #define MATSOLVERLUSOL        "lusol"
106 #define MATSOLVERMUMPS        "mumps"
107 #define MATSOLVERPASTIX       "pastix"
108 #define MATSOLVERMATLAB       "matlab"
109 #define MATSOLVERPETSC        "petsc"
110 #define MATSOLVERPLAPACK      "plapack"
111 #define MATSOLVERBAS          "bas"
112 
113 #define MATSOLVERBSTRM        "bstrm"
114 #define MATSOLVERSBSTRM       "sbstrm"
115 
116 /*E
117     MatFactorType - indicates what type of factorization is requested
118 
119     Level: beginner
120 
121    Any additions/changes here MUST also be made in include/finclude/petscmat.h
122 
123 .seealso: MatSolverPackage, MatGetFactor()
124 E*/
125 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
126 extern const char *const MatFactorTypes[];   /* printable names for the MatFactorType values */
127 
/* Obtain a factor matrix of the requested MatFactorType from the named solver package,
   and query availability / inspect an existing factor (see MatSolverPackage above). */
128 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
129 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
130 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
131 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
132 
133 /* Logging support */
134 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
/* class ids identifying Mat-related object classes to the PETSc logging infrastructure */
135 extern PetscClassId  MAT_CLASSID;
136 extern PetscClassId  MAT_FDCOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_NULLSPACE_CLASSID;
139 extern PetscClassId  MATMFFD_CLASSID;
140 
141 /*E
142     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144      that the input matrix is to be replaced with the converted matrix.
145 
146     Level: beginner
147 
148    Any additions/changes here MUST also be made in include/finclude/petscmat.h
149 
150 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151 E*/
152 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* see the MatReuse man page above: create a new matrix, reuse a previously returned one, or skip producing it */
153 
154 /*E
155     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
157 
158     Level: beginner
159 
160 .seealso: MatGetSeqNonzerostructure()
161 E*/
162 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether extracted submatrices carry numerical values or only the nonzero structure (see man page above) */
163 
164 extern PetscErrorCode  MatInitializePackage(const char[]);
165 
166 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
167 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
169 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
170 extern PetscErrorCode  MatSetType(Mat,const MatType);
171 extern PetscErrorCode  MatSetFromOptions(Mat);
172 extern PetscErrorCode  MatSetUpPreallocation(Mat);
173 extern PetscErrorCode  MatRegisterAll(const char[]);
174 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
175 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
176 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
179 
180 /*MC
181    MatRegisterDynamic - Adds a new matrix type
182 
183    Synopsis:
184    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
185 
186    Not Collective
187 
188    Input Parameters:
189 +  name - name of a new user-defined matrix type
190 .  path - path (either absolute or relative) the library containing this solver
191 .  name_create - name of routine to create method context
192 -  routine_create - routine to create method context
193 
194    Notes:
195    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
196 
197    If dynamic libraries are used, then the fourth input argument (routine_create)
198    is ignored.
199 
200    Sample usage:
201 .vb
202    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
203                "MyMatCreate",MyMatCreate);
204 .ve
205 
206    Then, your solver can be chosen with the procedural interface via
207 $     MatSetType(Mat,"my_mat")
208    or at runtime via the option
209 $     -mat_type my_mat
210 
211    Level: advanced
212 
213    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214          If your function is not being put into a shared library then use MatRegister() instead
215 
216 .keywords: Mat, register
217 
218 .seealso: MatRegisterAll(), MatRegisterDestroy()
219 
220 M*/
221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the function pointer (d) is dropped: per the man page
   above, the creation routine is located in the library by its name (c) instead. */
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
223 #else
224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
225 #endif
226 
227 extern PetscBool  MatRegisterAllCalled;
228 extern PetscFList MatList;
229 extern PetscFList MatColoringList;
230 extern PetscFList MatPartitioningList;
231 
232 /*E
233     MatStructure - Indicates if the matrix has the same nonzero structure
234 
235     Level: beginner
236 
237    Any additions/changes here MUST also be made in include/finclude/petscmat.h
238 
239 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
240 E*/
241 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;  /* relationship between two matrices' nonzero structures; see the MatStructure man page above */
242 
243 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
253 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
267 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
268 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
269 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
270 
271 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
279 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
293 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
294 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
295 
296 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
297 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
305 
306 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
320 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
321 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
322 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
323 
324 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
325 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
328 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
329 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
330 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
331 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
332 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 
340 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
341 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
342 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
343 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
344 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
345 extern PetscErrorCode  MatCompositeMerge(Mat);
346 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
/* How a MATCOMPOSITE combines its member matrices -- NOTE(review): semantics
   (sum vs. product of operators) inferred from the enumerator names; confirm. */
347 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
348 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
349 
350 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
351 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
352 
353 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
354 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
355 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
356 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
357 
358 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
359 
360 extern PetscErrorCode  MatSetUp(Mat);
361 extern PetscErrorCode  MatDestroy(Mat*);
362 
363 extern PetscErrorCode  MatConjugate(Mat);
364 extern PetscErrorCode  MatRealPart(Mat);
365 extern PetscErrorCode  MatImaginaryPart(Mat);
366 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
367 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
368 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
369 
370 /* ------------------------------------------------------------*/
371 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
374 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
375 extern PetscErrorCode  MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);
376 
377 /*S
378      MatStencil - Data structure (C struct) for storing information about a single row or
379         column of a matrix as an index on an associated grid.
380 
381    Level: beginner
382 
383   Concepts: matrix; linear operator
384 
385 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
386 S*/
387 typedef struct {
388   PetscInt k,j,i,c;   /* k,j,i: grid indices (declared in reverse of the usual i,j,k order); c: presumably the field component at the grid point -- confirm against MatSetValuesStencil() */
389 } MatStencil;
390 
391 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
392 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
393 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
394 
395 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
396 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
397 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
398 
399 /*E
400     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
401      to continue to add values to it
402 
403     Level: beginner
404 
405 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
406 E*/
407 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;  /* note the explicit values: MAT_FINAL_ASSEMBLY is 0, so a zero-initialized value means final assembly */
408 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
409 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
410 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
411 
412 
413 
414 /*E
415     MatOption - Options that may be set for a matrix and its behavior or storage
416 
417     Level: beginner
418 
419    Any additions/changes here MUST also be made in include/finclude/petscmat.h
420 
421 .seealso: MatSetOption()
422 E*/
423 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
424               MAT_SYMMETRIC,
425               MAT_STRUCTURALLY_SYMMETRIC,
426               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
427               MAT_NEW_NONZERO_LOCATION_ERR,
428               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
429               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
430               MAT_USE_INODES,
431               MAT_HERMITIAN,
432               MAT_SYMMETRY_ETERNAL,
433               MAT_CHECK_COMPRESSED_ROW,
434               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
435               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
436               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
437               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS is a count sentinel: keep it last when adding options (and mirror changes in include/finclude/petscmat.h) */
438 extern const char *MatOptions[];  /* printable names, presumably indexed by MatOption -- confirm */
439 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
440 extern PetscErrorCode  MatGetType(Mat,const MatType*);
441 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
442 
443 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
444 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
445 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
446 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
447 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
448 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
449 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
450 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
451 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
452 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
453 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
454 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
455 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
456 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
457 
458 
459 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
460 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
461 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
462 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
463 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
464 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
465 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
466 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
467 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
468 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
469 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
470 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
471 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
472 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
473 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
474 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
475 
476 /*E
477     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
478   its numerical values copied over or just its nonzero structure.
479 
480     Level: beginner
481 
482    Any additions/changes here MUST also be made in include/finclude/petscmat.h
483 
484 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
485 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
486 $                               have several matrices with the same nonzero pattern.
487 
488 .seealso: MatDuplicate()
489 E*/
490 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* see man page above; MAT_SHARE_NONZERO_PATTERN also implies MAT_DO_NOT_COPY_VALUES */
491 
492 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
493 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
494 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
495 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
496 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
497 
498 
499 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
500 extern PetscErrorCode  MatView(Mat,PetscViewer);
501 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
502 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
503 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
504 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
505 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
506 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
507 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
508 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
509 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
510 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
511 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
512 
513 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
514 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
515 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
516 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
517 
518 /*S
519      MatInfo - Context of matrix information, used with MatGetInfo()
520 
521    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
522 
523    Level: intermediate
524 
525   Concepts: matrix^nonzero information
526 
527 .seealso:  MatGetInfo(), MatInfoType
528 S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size of the matrix */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* nonzeros: allocated, actually used, allocated but unused */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU: user-supplied estimate vs actually required */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
538 
539 /*E
540     MatInfoType - Indicates if you want information about the local part of the matrix,
541      the entire parallel matrix or the maximum over all the local parts.
542 
543     Level: beginner
544 
545    Any additions/changes here MUST also be made in include/finclude/petscmat.h
546 
547 .seealso: MatGetInfo(), MatInfo
548 E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; /* keep values in sync with include/finclude/petscmat.h */
550 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
551 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
552 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
553 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
554 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
555 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
556 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
557 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
558 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
559 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
560 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
561 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
562 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
563 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
564 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
565 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
566 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
567 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
568 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
569 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
570 
571 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
572 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
573 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
574 extern PetscErrorCode  MatZeroEntries(Mat);
575 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
576 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
577 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
578 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
579 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
580 
581 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
582 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
583 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
584 
585 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
586 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
587 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
588 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
589 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
590 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
591 
592 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
593 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
594 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
595 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
596 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
597 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
598 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
599 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
600 
601 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
602 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
603 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
604 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
605 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
606 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
607 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
608 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
609 #if defined (PETSC_USE_CTABLE)
610 #include "petscctable.h"
611 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
612 #else
613 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
614 #endif
615 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
616 
617 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
618 
619 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
620 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
621 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
622 
623 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
624 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
625 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
626 
627 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
628 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
629 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
630 
631 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
632 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
633 
634 extern PetscErrorCode  MatScale(Mat,PetscScalar);
635 extern PetscErrorCode  MatShift(Mat,PetscScalar);
636 
637 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
638 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
639 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
640 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
641 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
642 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
643 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
644 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
645 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
646 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
647 
648 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
649 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
650 
651 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
652 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
653 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
654 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
655 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
656 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
657 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
658 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
659 
660 /*MC
661    MatSetValue - Set a single entry into a matrix.
662 
663    Not collective
664 
665    Input Parameters:
666 +  m - the matrix
667 .  row - the row location of the entry
668 .  col - the column location of the entry
669 .  value - the value to insert
670 -  mode - either INSERT_VALUES or ADD_VALUES
671 
672    Notes:
673    For efficiency one should use MatSetValues() and set several or many
674    values simultaneously if possible.
675 
676    Level: beginner
677 
678 .seealso: MatSetValues(), MatSetValueLocal()
679 M*/
680 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
681 
682 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
683 
684 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
685 
686 /*MC
687    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
688        row in a matrix providing the data that one can use to correctly preallocate the matrix.
689 
690    Synopsis:
691    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
692 
693    Collective on MPI_Comm
694 
695    Input Parameters:
696 +  comm - the communicator that will share the eventually allocated matrix
697 .  nrows - the number of LOCAL rows in the matrix
698 -  ncols - the number of LOCAL columns in the matrix
699 
700    Output Parameters:
701 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
703 
704 
705    Level: intermediate
706 
707    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
709 
710    Do not malloc or free dnz and onz, that is handled internally by these routines
711 
712    Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices)
713 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
715 
716   Concepts: preallocation^Matrix
717 
718 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
719           MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal()
720 M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ /* this { is closed by MatPreallocateFinalize() */ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; /* __ variables are shared with the other MatPreallocate macros */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); /* one counter per local row for the diagonal (dnz) and off-diagonal (onz) blocks */ \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;/* [__start,__end) = global columns owned by this process */\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; /* __rstart = first global row owned by this process */
729 
730 /*MC
731    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
732        row in a matrix providing the data that one can use to correctly preallocate the matrix.
733 
734    Synopsis:
735    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
736 
737    Collective on MPI_Comm
738 
739    Input Parameters:
740 +  comm - the communicator that will share the eventually allocated matrix
741 .  nrows - the number of LOCAL rows in the matrix
742 -  ncols - the number of LOCAL columns in the matrix
743 
744    Output Parameters:
745 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
747 
748 
749    Level: intermediate
750 
751    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
753 
754    Do not malloc or free dnz and onz, that is handled internally by these routines
755 
   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
757 
758   Concepts: preallocation^Matrix
759 
760 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
761           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
762 M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ /* this { is closed by MatPreallocateFinalize() */ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; /* no __start here: MatPreallocateSymmetricSet() only tests against __end */ \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); /* one counter per local row for the diagonal (dnz) and off-diagonal (onz) blocks */ \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);/* __end = one past the last global column owned by this process */\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; /* __rstart = first global row owned by this process */
771 
772 /*MC
773    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
774        inserted using a local number of the rows and columns
775 
776    Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
778 
779    Not Collective
780 
781    Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
790 
791 
792    Level: intermediate
793 
794    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
796 
797    Do not malloc or free dnz and onz, that is handled internally by these routines
798 
799   Concepts: preallocation^Matrix
800 
801 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
802           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
803 M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);/* translate row indices local -> global, in place */\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);/* translate column indices local -> global, in place */\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);/* count the now-global indices row by row */\
  }\
}
813 
814 /*MC
815    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
816        inserted using a local number of the rows and columns
817 
818    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
820 
821    Not Collective
822 
823    Input Parameters:
824 +  map - the mapping between local numbering and global numbering
825 .  nrows - the number of rows indicated
826 .  rows - the indices of the rows
827 .  ncols - the number of columns in the matrix
828 .  cols - the columns indicated
829 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
831 
832 
833    Level: intermediate
834 
835    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
837 
838    Do not malloc or free dnz and onz that is handled internally by these routines
839 
840   Concepts: preallocation^Matrix
841 
842 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
843           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
844 M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);/* translate row indices local -> global, in place */\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);/* same map is used for columns (symmetric layout) */\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);/* count the now-global indices row by row */\
  }\
}
854 
855 /*MC
856    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted, given in global numbering of the rows and columns
858 
859    Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
861 
862    Not Collective
863 
864    Input Parameters:
865 +  row - the row
866 .  ncols - the number of columns in the matrix
867 -  cols - the columns indicated
868 
869    Output Parameters:
870 +  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
872 
873 
874    Level: intermediate
875 
876    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
878 
879    Do not malloc or free dnz and onz that is handled internally by these routines
880 
881    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
882 
883   Concepts: preallocation^Matrix
884 
885 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
886           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
887 M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; /* relies on __rstart,__nrows,__start,__end declared by MatPreallocateInitialize() */ \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; /* column owned by another process: off-diagonal block */ \
    else dnz[row - __rstart]++;/* locally owned column: diagonal block */\
  }\
}
897 
898 /*MC
899    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted, given in global numbering of the rows and columns
901 
902    Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
904 
905    Not Collective
906 
907    Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
914 
915 
916    Level: intermediate
917 
918    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
920 
921    Do not malloc or free dnz and onz that is handled internally by these routines
922 
923    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
924 
925   Concepts: preallocation^Matrix
926 
927 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
928           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
929 M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; /* relies on __rstart,__end declared by MatPreallocateSymmetricInitialize() */ \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; /* past the local column range: off-diagonal block */ \
    else if (cols[__i] >= row) dnz[row - __rstart]++;/* only the upper triangle (col >= row) is counted; cols below the diagonal are ignored */\
  }\
}
937 
938 /*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
940 
941    Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
943 
944    Not Collective
945 
946    Input Parameters:
947 .  A - matrix
948 .  row - row where values exist (must be local to this process)
949 .  ncols - number of columns
950 .  cols - columns with nonzeros
951 .  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
953 
954 
955    Level: intermediate
956 
957    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
959 
960    Do not malloc or free dnz and onz that is handled internally by these routines
961 
962    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
963 
964   Concepts: preallocation^Matrix
965 
966 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
967           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
968 M*/
/* If A exists, insert zeros at the locations directly; otherwise fall back to counting via MatPreallocateSet().
   NOTE(review): this macro uses the caller's 'ierr', unlike the other MatPreallocate macros which use the '_4_ierr'
   declared by MatPreallocateInitialize() -- the caller must have a PetscErrorCode ierr in scope. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
970 
971 
972 /*MC
973    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
974        row in a matrix providing the data that one can use to correctly preallocate the matrix.
975 
976    Synopsis:
977    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
978 
979    Collective on MPI_Comm
980 
981    Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines
984 
985 
986    Level: intermediate
987 
988    Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
990 
991    Do not malloc or free dnz and onz that is handled internally by these routines
992 
993    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
994 
995   Concepts: preallocation^Matrix
996 
997 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
998           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
999 M*/
/* frees dnz/onz and emits the } that closes the scope opened by MatPreallocateInitialize()/MatPreallocateSymmetricInitialize() */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1001 
1002 
1003 
1004 /* Routines unique to particular data structures */
1005 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1006 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1007 
1008 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1009 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1010 
1011 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1012 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1013 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1014 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1015 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1016 
1017 #define MAT_SKIP_ALLOCATION -4
1018 
1019 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1020 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1021 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1022 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1023 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1024 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1025 
1026 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1027 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1028 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1029 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1030 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1031 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1032 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1033 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1034 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1035 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1036 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1037 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1038 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1039 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1040 
1041 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1042 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1043 
1044 extern PetscErrorCode  MatStoreValues(Mat);
1045 extern PetscErrorCode  MatRetrieveValues(Mat);
1046 
1047 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1048 
1049 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1050 /*
1051   These routines are not usually accessed directly, rather solving is
1052   done through the KSP and PC interfaces.
1053 */
1054 
1055 /*E
1056     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1057        with an optional dynamic library name, for example
1058        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1059 
1060    Level: beginner
1061 
1062    Cannot use const because the PC objects manipulate the string
1063 
1064 .seealso: MatGetOrdering()
1065 E*/
1066 #define MatOrderingType char*
1067 #define MATORDERINGNATURAL     "natural"
1068 #define MATORDERINGND          "nd"
1069 #define MATORDERING1WD         "1wd"
1070 #define MATORDERINGRCM         "rcm"
1071 #define MATORDERINGQMD         "qmd"
1072 #define MATORDERINGROWLENGTH   "rowlength"
1073 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1074 
1075 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1076 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1077 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1078 
1079 /*MC
1080    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1081 
1082    Synopsis:
1083    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1084 
1085    Not Collective
1086 
1087    Input Parameters:
1088 +  sname - name of ordering (for example MATORDERINGND)
1089 .  path - location of library where creation routine is
1090 .  name - name of function that creates the ordering type,a string
1091 -  function - function pointer that creates the ordering
1092 
1093    Level: developer
1094 
1095    If dynamic libraries are used, then the fourth input argument (function)
1096    is ignored.
1097 
1098    Sample usage:
1099 .vb
1100    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1101                "MyOrder",MyOrder);
1102 .ve
1103 
1104    Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
1106    or at runtime via the option
1107 $     -pc_factor_mat_ordering_type my_order
1108 
   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1110 
1111 .keywords: matrix, ordering, register
1112 
1113 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1114 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) /* function pointer dropped: the routine is loaded from the dynamic library instead */
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1120 
1121 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1122 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1123 extern PetscBool  MatOrderingRegisterAllCalled;
1124 extern PetscFList MatOrderingList;
1125 
1126 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1127 
1128 /*E
1129     MatFactorShiftType - Type of numeric shift added during factorization to prevent zero pivots.
1130 
1131    Level: beginner
1132 
1133 E*/
/* Shift strategies applied to a matrix factor to prevent zero pivots; selected via the
   shifttype field of MatFactorInfo below. */
1134 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
/* printable names matching the MatFactorShiftType entries, in order */
1135 extern const char *MatFactorShiftTypes[];
1136 
1137 /*S
1138    MatFactorInfo - Data passed into the matrix factorization routines
1139 
1140    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1141 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1142 
1143    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1144 
1145       You can use MatFactorInfoInitialize() to set default values.
1146 
1147    Level: developer
1148 
1149 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1150           MatFactorInfoInitialize()
1151 
1152 S*/
1153 typedef struct {
     /* Every field is PetscReal -- even flag- and count-like entries -- so that the struct
        maps onto a Fortran double precision array of length MAT_FACTORINFO_SIZE (see the
        S-doc above). Use MatFactorInfoInitialize() to obtain default values. */
1154   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1155   PetscReal     usedt;          /* nonzero => use the drop tolerance dt below -- TODO confirm exact convention */
1156   PetscReal     dt;             /* drop tolerance */
1157   PetscReal     dtcol;          /* tolerance for pivoting */
1158   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1159   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1160   PetscReal     levels;         /* ICC/ILU(levels) */
1161   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1162                                    factorization may be faster if do not pivot */
1163   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1164   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value) */
1165   PetscReal     shiftamount;     /* how large the shift is */
1166 } MatFactorInfo;
1167 
1168 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1169 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1170 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1171 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1172 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1173 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1174 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1175 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1176 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1177 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1178 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1179 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1180 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1181 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1182 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1183 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1184 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1185 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1186 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1187 
1188 extern PetscErrorCode  MatSetUnfactored(Mat);
1189 
1190 /*E
1191     MatSORType - What type of (S)SOR to perform
1192 
1193     Level: beginner
1194 
1195    May be bitwise ORd together
1196 
1197    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1198 
1199    MatSORType may be bitwise ORd together, so do not change the numbers
1200 
1201 .seealso: MatSOR()
1202 E*/
/* Bit flags (see the E-doc above): SOR_SYMMETRIC_SWEEP = SOR_FORWARD_SWEEP|SOR_BACKWARD_SWEEP
   (1|2 = 3) and SOR_LOCAL_SYMMETRIC_SWEEP = SOR_LOCAL_FORWARD_SWEEP|SOR_LOCAL_BACKWARD_SWEEP
   (4|8 = 12); the remaining entries are independent powers of two, so values may be ORd
   together and must not be renumbered. */
1203 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1204               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1205               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1206               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1207 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1208 
1209 /*
1210     These routines are for efficiently computing Jacobians via finite differences.
1211 */
1212 
1213 /*E
1214     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1215        with an optional dynamic library name, for example
1216        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1217 
1218    Level: beginner
1219 
1220 .seealso: MatGetColoring()
1221 E*/
1222 #define MatColoringType char*
1223 #define MATCOLORINGNATURAL "natural"
1224 #define MATCOLORINGSL      "sl"
1225 #define MATCOLORINGLF      "lf"
1226 #define MATCOLORINGID      "id"
1227 
1228 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1229 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1230 
1231 /*MC
1232    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1233                                matrix package.
1234 
1235    Synopsis:
1236    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat,MatColoringType,ISColoring*))
1237 
1238    Not Collective
1239 
1240    Input Parameters:
1241 +  sname - name of Coloring (for example MATCOLORINGSL)
1242 .  path - location of library where creation routine is
1243 .  name - name of function that creates the Coloring type, a string
1244 -  function - function pointer that creates the coloring
1245 
1246    Level: developer
1247 
1248    If dynamic libraries are used, then the fourth input argument (function)
1249    is ignored.
1250 
1251    Sample usage:
1252 .vb
1253    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1254                "MyColor",MyColor);
1255 .ve
1256 
1257    Then, your coloring can be chosen with the procedural interface via
1258 $     MatColoringSetType(part,"my_color")
1259    or at runtime via the option
1260 $     -mat_coloring_type my_color
1261 
1262    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1263 
1264 .keywords: matrix, Coloring, register
1265 
1266 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1267 M*/
1268 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name (argument c) in the
   library given by path (argument b), so the function pointer argument d is intentionally
   dropped; see the MC documentation above. */
1269 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1270 #else
1271 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1272 #endif
1273 
1274 extern PetscBool  MatColoringRegisterAllCalled;
1275 
1276 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1277 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1278 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1279 
1280 /*S
1281      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1282         and coloring
1283 
1284    Level: beginner
1285 
1286   Concepts: coloring, sparse Jacobian, finite differences
1287 
1288 .seealso:  MatFDColoringCreate()
1289 S*/
1290 typedef struct _p_MatFDColoring* MatFDColoring;
1291 
1292 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1293 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1294 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1295 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1296 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1297 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1298 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1299 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1300 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1301 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1302 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1303 /*
1304     These routines are for partitioning matrices: currently used only
1305   for adjacency matrix, MatCreateMPIAdj().
1306 */
1307 
1308 /*S
1309      MatPartitioning - Object for managing the partitioning of a matrix or graph
1310 
1311    Level: beginner
1312 
1313   Concepts: partitioning
1314 
1315 .seealso:  MatPartitioningCreate(), MatPartitioningType
1316 S*/
1317 typedef struct _p_MatPartitioning* MatPartitioning;
1318 
1319 /*E
1320     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1321        with an optional dynamic library name, for example
1322        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1323 
1324    Level: beginner
1325 
1326 .seealso: MatPartitioningCreate(), MatPartitioning
1327 E*/
1328 #define MatPartitioningType char*
1329 #define MATPARTITIONINGCURRENT  "current"
1330 #define MATPARTITIONINGSQUARE   "square"
1331 #define MATPARTITIONINGPARMETIS "parmetis"
1332 #define MATPARTITIONINGCHACO    "chaco"
1333 #define MATPARTITIONINGPARTY    "party"
1334 #define MATPARTITIONINGPTSCOTCH "ptscotch"
1335 
1336 
1337 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1338 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1339 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1340 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1341 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1342 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1343 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1344 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1345 
1346 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1347 
1348 /*MC
1349    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1350    matrix package.
1351 
1352    Synopsis:
1353    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1354 
1355    Not Collective
1356 
1357    Input Parameters:
1358 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1359 .  path - location of library where creation routine is
1360 .  name - name of function that creates the partitioning type, a string
1361 -  function - function pointer that creates the partitioning type
1362 
1363    Level: developer
1364 
1365    If dynamic libraries are used, then the fourth input argument (function)
1366    is ignored.
1367 
1368    Sample usage:
1369 .vb
1370    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1371                "MyPartCreate",MyPartCreate);
1372 .ve
1373 
1374    Then, your partitioner can be chosen with the procedural interface via
1375 $     MatPartitioningSetType(part,"my_part")
1376    or at runtime via the option
1377 $     -mat_partitioning_type my_part
1378 
1379    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1380 
1381 .keywords: matrix, partitioning, register
1382 
1383 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1384 M*/
1385 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name (argument c) in the
   library given by path (argument b), so the function pointer argument d is intentionally
   dropped; see the MC documentation above. */
1386 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1387 #else
1388 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1389 #endif
1390 
1391 extern PetscBool  MatPartitioningRegisterAllCalled;
1392 
1393 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1394 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1395 
1396 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1397 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1398 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1399 
1400 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1401 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1402 
/* Chaco global partitioning methods; set/queried via MatPartitioningChacoSetGlobal()/GetGlobal().
   NOTE(review): value 3 is unassigned -- presumably a retired entry; do not renumber. */
1403 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
/* printable names matching MPChacoGlobalType */
1404 extern const char *MPChacoGlobalTypes[];
/* Chaco local refinement methods; set/queried via MatPartitioningChacoSetLocal()/GetLocal() */
1405 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
1406 extern const char *MPChacoLocalTypes[];
/* Chaco eigensolver variants; set/queried via MatPartitioningChacoSetEigenSolver()/GetEigenSolver() */
1407 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
1408 extern const char *MPChacoEigenTypes[];
1409 
1410 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
1411 extern PetscErrorCode  MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
1412 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
1413 extern PetscErrorCode  MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
1414 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1415 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1416 extern PetscErrorCode  MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
1417 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
1418 extern PetscErrorCode  MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
1419 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
1420 extern PetscErrorCode  MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);
1421 
/* NOTE(review): grouping suggests these strings are the method names accepted by
   MatPartitioningPartySetGlobal() below -- confirm against the Party implementation */
1422 #define MP_PARTY_OPT "opt"
1423 #define MP_PARTY_LIN "lin"
1424 #define MP_PARTY_SCA "sca"
1425 #define MP_PARTY_RAN "ran"
1426 #define MP_PARTY_GBF "gbf"
1427 #define MP_PARTY_GCF "gcf"
1428 #define MP_PARTY_BUB "bub"
1429 #define MP_PARTY_DEF "def"
1430 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning,const char*);
/* NOTE(review): likewise, presumably the method names accepted by MatPartitioningPartySetLocal() below */
1431 #define MP_PARTY_HELPFUL_SETS "hs"
1432 #define MP_PARTY_KERNIGHAN_LIN "kl"
1433 #define MP_PARTY_NONE "no"
1434 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning,const char*);
1435 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1436 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
1437 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);
1438 
/* PT-Scotch strategy selection; set/queried via MatPartitioningPTScotchSetStrategy()/GetStrategy() */
1439 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
/* printable names matching MPPTScotchStrategyType */
1440 extern const char *MPPTScotchStrategyTypes[];
1441 
1442 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
1443 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
1444 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
1445 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);
1446 
1447 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1448 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1449 
1450 /*
1451     If you add entries here you must also add them to finclude/petscmat.h
1452 */
/* Indices into a Mat's function table, used with MatHasOperation(), MatShellSetOperation()
   and MatShellGetOperation(). Values must stay in sync with finclude/petscmat.h (see the
   note above) and must NEVER be renumbered. NOTE(review): 124 and 126-127 are unassigned --
   presumably retired or reserved entries; do not reuse them without checking the Fortran
   include and the struct _MatOps layout. */
1453 typedef enum { MATOP_SET_VALUES=0,
1454                MATOP_GET_ROW=1,
1455                MATOP_RESTORE_ROW=2,
1456                MATOP_MULT=3,
1457                MATOP_MULT_ADD=4,
1458                MATOP_MULT_TRANSPOSE=5,
1459                MATOP_MULT_TRANSPOSE_ADD=6,
1460                MATOP_SOLVE=7,
1461                MATOP_SOLVE_ADD=8,
1462                MATOP_SOLVE_TRANSPOSE=9,
1463                MATOP_SOLVE_TRANSPOSE_ADD=10,
1464                MATOP_LUFACTOR=11,
1465                MATOP_CHOLESKYFACTOR=12,
1466                MATOP_SOR=13,
1467                MATOP_TRANSPOSE=14,
1468                MATOP_GETINFO=15,
1469                MATOP_EQUAL=16,
1470                MATOP_GET_DIAGONAL=17,
1471                MATOP_DIAGONAL_SCALE=18,
1472                MATOP_NORM=19,
1473                MATOP_ASSEMBLY_BEGIN=20,
1474                MATOP_ASSEMBLY_END=21,
1475                MATOP_SET_OPTION=22,
1476                MATOP_ZERO_ENTRIES=23,
1477                MATOP_ZERO_ROWS=24,
1478                MATOP_LUFACTOR_SYMBOLIC=25,
1479                MATOP_LUFACTOR_NUMERIC=26,
1480                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1481                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1482                MATOP_SETUP_PREALLOCATION=29,
1483                MATOP_ILUFACTOR_SYMBOLIC=30,
1484                MATOP_ICCFACTOR_SYMBOLIC=31,
1485                MATOP_GET_ARRAY=32,
1486                MATOP_RESTORE_ARRAY=33,
1487                MATOP_DUPLICATE=34,
1488                MATOP_FORWARD_SOLVE=35,
1489                MATOP_BACKWARD_SOLVE=36,
1490                MATOP_ILUFACTOR=37,
1491                MATOP_ICCFACTOR=38,
1492                MATOP_AXPY=39,
1493                MATOP_GET_SUBMATRICES=40,
1494                MATOP_INCREASE_OVERLAP=41,
1495                MATOP_GET_VALUES=42,
1496                MATOP_COPY=43,
1497                MATOP_GET_ROW_MAX=44,
1498                MATOP_SCALE=45,
1499                MATOP_SHIFT=46,
1500                MATOP_DIAGONAL_SET=47,
1501                MATOP_ILUDT_FACTOR=48,
1502                MATOP_SET_BLOCK_SIZE=49,
1503                MATOP_GET_ROW_IJ=50,
1504                MATOP_RESTORE_ROW_IJ=51,
1505                MATOP_GET_COLUMN_IJ=52,
1506                MATOP_RESTORE_COLUMN_IJ=53,
1507                MATOP_FDCOLORING_CREATE=54,
1508                MATOP_COLORING_PATCH=55,
1509                MATOP_SET_UNFACTORED=56,
1510                MATOP_PERMUTE=57,
1511                MATOP_SET_VALUES_BLOCKED=58,
1512                MATOP_GET_SUBMATRIX=59,
1513                MATOP_DESTROY=60,
1514                MATOP_VIEW=61,
1515                MATOP_CONVERT_FROM=62,
1516                MATOP_USE_SCALED_FORM=63,
1517                MATOP_SCALE_SYSTEM=64,
1518                MATOP_UNSCALE_SYSTEM=65,
1519                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1520                MATOP_SET_VALUES_LOCAL=67,
1521                MATOP_ZERO_ROWS_LOCAL=68,
1522                MATOP_GET_ROW_MAX_ABS=69,
1523                MATOP_GET_ROW_MIN_ABS=70,
1524                MATOP_CONVERT=71,
1525                MATOP_SET_COLORING=72,
1526                MATOP_SET_VALUES_ADIC=73,
1527                MATOP_SET_VALUES_ADIFOR=74,
1528                MATOP_FD_COLORING_APPLY=75,
1529                MATOP_SET_FROM_OPTIONS=76,
1530                MATOP_MULT_CON=77,
1531                MATOP_MULT_TRANSPOSE_CON=78,
1532                MATOP_PERMUTE_SPARSIFY=79,
1533                MATOP_MULT_MULTIPLE=80,
1534                MATOP_SOLVE_MULTIPLE=81,
1535                MATOP_GET_INERTIA=82,
1536                MATOP_LOAD=83,
1537                MATOP_IS_SYMMETRIC=84,
1538                MATOP_IS_HERMITIAN=85,
1539                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1540                MATOP_DUMMY=87,
1541                MATOP_GET_VECS=88,
1542                MATOP_MAT_MULT=89,
1543                MATOP_MAT_MULT_SYMBOLIC=90,
1544                MATOP_MAT_MULT_NUMERIC=91,
1545                MATOP_PTAP=92,
1546                MATOP_PTAP_SYMBOLIC=93,
1547                MATOP_PTAP_NUMERIC=94,
1548                MATOP_MAT_MULTTRANSPOSE=95,
1549                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1550                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1551                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1552                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1553                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1554                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1555                MATOP_CONJUGATE=102,
1556                MATOP_SET_SIZES=103,
1557                MATOP_SET_VALUES_ROW=104,
1558                MATOP_REAL_PART=105,
1559                MATOP_IMAG_PART=106,
1560                MATOP_GET_ROW_UTRIANGULAR=107,
1561                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1562                MATOP_MATSOLVE=109,
1563                MATOP_GET_REDUNDANTMATRIX=110,
1564                MATOP_GET_ROW_MIN=111,
1565                MATOP_GET_COLUMN_VEC=112,
1566                MATOP_MISSING_DIAGONAL=113,
1567                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1568                MATOP_CREATE=115,
1569                MATOP_GET_GHOSTS=116,
1570                MATOP_GET_LOCALSUBMATRIX=117,
1571                MATOP_RESTORE_LOCALSUBMATRIX=118,
1572                MATOP_MULT_DIAGONAL_BLOCK=119,
1573                MATOP_HERMITIANTRANSPOSE=120,
1574                MATOP_MULTHERMITIANTRANSPOSE=121,
1575                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1576                MATOP_GETMULTIPROCBLOCK=123,
1577                MATOP_GETCOLUMNNORMS=125,
1578 	       MATOP_GET_SUBMATRICES_PARALLEL=128,
1579                MATOP_SET_VALUES_BATCH=129
1580              } MatOperation;
1581 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1582 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1583 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1584 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1585 
1586 /*
1587    Codes for matrices stored on disk. By default they are
1588    stored in a universal format. By changing the format with
1589    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1590    be stored in a way natural for the matrix, for example dense matrices
1591    would be stored as dense. Matrices stored this way may only be
1592    read into matrices of the same type.
1593 */
1594 #define MATRIX_BINARY_FORMAT_DENSE -1
1595 
1596 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1597 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1598 
1599 /*S
1600      MatNullSpace - Object that removes a null space from a vector, i.e.
1601          orthogonalizes the vector to a subspace
1602 
1603    Level: advanced
1604 
1605   Concepts: matrix; linear operator, null space
1606 
1607   Users manual sections:
1608 .   sec_singular
1609 
1610 .seealso:  MatNullSpaceCreate()
1611 S*/
1612 typedef struct _p_MatNullSpace* MatNullSpace;
1613 
1614 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1615 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1616 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1617 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1618 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1619 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1620 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1621 
1622 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1623 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1624 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1625 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1626 
1627 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1628 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1629 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1630 
1631 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1632 
1633 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1634 
1635 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1636 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1637 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1638 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1639 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1640 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1641 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1642 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1643 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1644 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1645 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1646 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1647 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1648 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1649 
1650 /*S
1651     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1652               Jacobian vector products
1653 
1654     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1655 
1656            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1657 
1658     Level: developer
1659 
1660 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1661 S*/
1662 typedef struct _p_MatMFFD* MatMFFD;
1663 
1664 /*E
1665     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1666 
1667    Level: beginner
1668 
1669 .seealso: MatMFFDSetType(), MatMFFDRegister()
1670 E*/
1671 #define MatMFFDType char*
1672 #define MATMFFD_DS  "ds"
1673 #define MATMFFD_WP  "wp"
1674 
1675 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1676 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1677 
1678 /*MC
1679    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1680 
1681    Synopsis:
1682    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1683 
1684    Not Collective
1685 
1686    Input Parameters:
1687 +  name_solver - name of a new user-defined compute-h module
1688 .  path - path (either absolute or relative) the library containing this solver
1689 .  name_create - name of routine to create method context
1690 -  routine_create - routine to create method context
1691 
1692    Level: developer
1693 
1694    Notes:
1695    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1696 
1697    If dynamic libraries are used, then the fourth input argument (routine_create)
1698    is ignored.
1699 
1700    Sample usage:
1701 .vb
1702    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1703                "MyHCreate",MyHCreate);
1704 .ve
1705 
1706    Then, your solver can be chosen with the procedural interface via
1707 $     MatMFFDSetType(mfctx,"my_h")
1708    or at runtime via the option
1709 $     -snes_mf_type my_h
1710 
1711 .keywords: MatMFFD, register
1712 
1713 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1714 M*/
1715 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
/* With dynamic libraries the creation routine is looked up by name (argument c) in the
   library given by path (argument b), so the function pointer argument d is intentionally
   dropped; see the MC documentation above. */
1716 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1717 #else
1718 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1719 #endif
1720 
1721 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1722 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1723 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1724 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1725 
1726 
1727 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1728 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1729 
1730 /*
1731    PETSc interface to MUMPS
1732 */
1733 #ifdef PETSC_HAVE_MUMPS
1734 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1735 #endif
1736 
1737 /*
1738    PETSc interface to SUPERLU
1739 */
1740 #ifdef PETSC_HAVE_SUPERLU
1741 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1742 #endif
1743 
1744 #if defined(PETSC_HAVE_CUSP)
1745 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1746 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1747 #endif
1748 
1749 /*
1750    PETSc interface to FFTW
1751 */
1752 #if defined(PETSC_HAVE_FFTW)
1753 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1754 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1755 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1756 #endif
1757 
1758 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1759 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1760 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1761 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1762 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1763 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1764 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1765 
1766 PETSC_EXTERN_CXX_END
1767 #endif
1768