xref: /petsc/include/petscmat.h (revision 089b283744364aef00a310a92368c00bc3aa30b8)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;  /* opaque handle: struct _p_Mat is defined only inside the PETSc library */
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATMPIAIJ          "mpiaij"
38 #define MATAIJCRL          "aijcrl"
39 #define MATSEQAIJCRL       "seqaijcrl"
40 #define MATMPIAIJCRL       "mpiaijcrl"
41 #define MATAIJCUSP         "aijcusp"
42 #define MATSEQAIJCUSP      "seqaijcusp"
43 #define MATMPIAIJCUSP      "mpiaijcusp"
44 #define MATAIJPERM         "aijperm"
45 #define MATSEQAIJPERM      "seqaijperm"
46 #define MATMPIAIJPERM      "mpiaijperm"
47 #define MATSHELL           "shell"
48 #define MATDENSE           "dense"
49 #define MATSEQDENSE        "seqdense"
50 #define MATMPIDENSE        "mpidense"
51 #define MATBAIJ            "baij"
52 #define MATSEQBAIJ         "seqbaij"
53 #define MATMPIBAIJ         "mpibaij"
54 #define MATMPIADJ          "mpiadj"
55 #define MATSBAIJ           "sbaij"
56 #define MATSEQSBAIJ        "seqsbaij"
57 #define MATMPISBAIJ        "mpisbaij"
58 
59 #define MATSEQBSTRM        "seqbstrm"
60 #define MATMPIBSTRM        "mpibstrm"
61 #define MATBSTRM           "bstrm"
62 #define MATSEQSBSTRM       "seqsbstrm"
63 #define MATMPISBSTRM       "mpisbstrm"
64 #define MATSBSTRM          "sbstrm"
65 
66 #define MATDAAD            "daad"
67 #define MATMFFD            "mffd"
68 #define MATNORMAL          "normal"
69 #define MATLRC             "lrc"
70 #define MATSCATTER         "scatter"
71 #define MATBLOCKMAT        "blockmat"
72 #define MATCOMPOSITE       "composite"
73 #define MATFFT             "fft"
74 #define MATFFTW            "fftw"
75 #define MATSEQCUFFT        "seqcufft"
76 #define MATTRANSPOSEMAT    "transpose"
77 #define MATSCHURCOMPLEMENT "schurcomplement"
78 #define MATPYTHON          "python"
79 #define MATHYPRESTRUCT     "hyprestruct"
80 #define MATHYPRESSTRUCT    "hypresstruct"
81 #define MATSUBMATRIX       "submatrix"
82 #define MATLOCALREF        "localref"
83 #define MATNEST            "nest"
84 
85 /*E
86     MatSolverPackage - String with the name of a PETSc matrix solver type.
87 
88     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
89        SuperLU or SuperLU_Dist etc.
90 
91 
92    Level: beginner
93 
94 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
95 E*/
96 #define MatSolverPackage char*
97 #define MATSOLVERSPOOLES      "spooles"
98 #define MATSOLVERSUPERLU      "superlu"
99 #define MATSOLVERSUPERLU_DIST "superlu_dist"
100 #define MATSOLVERUMFPACK      "umfpack"
101 #define MATSOLVERCHOLMOD      "cholmod"
102 #define MATSOLVERESSL         "essl"
103 #define MATSOLVERLUSOL        "lusol"
104 #define MATSOLVERMUMPS        "mumps"
105 #define MATSOLVERPASTIX       "pastix"
106 #define MATSOLVERDSCPACK      "dscpack"
107 #define MATSOLVERMATLAB       "matlab"
108 #define MATSOLVERPETSC        "petsc"
109 #define MATSOLVERPLAPACK      "plapack"
110 #define MATSOLVERBAS          "bas"
111 
112 #define MATSOLVERBSTRM        "bstrm"
113 #define MATSOLVERSBSTRM       "sbstrm"
114 
115 /*E
116     MatFactorType - indicates what type of factorization is requested
117 
118     Level: beginner
119 
120    Any additions/changes here MUST also be made in include/finclude/petscmat.h
121 
122 .seealso: MatSolverPackage, MatGetFactor()
123 E*/
124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;  /* factorization requested from MatGetFactor(); NONE means the matrix is not a factor */
125 extern const char *const MatFactorTypes[];
126 
127 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
128 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
129 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
130 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
131 
132 /* Logging support */
133 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
134 extern PetscClassId  MAT_CLASSID;
135 extern PetscClassId  MAT_FDCOLORING_CLASSID;
136 extern PetscClassId  MAT_PARTITIONING_CLASSID;
137 extern PetscClassId  MAT_NULLSPACE_CLASSID;
138 extern PetscClassId  MATMFFD_CLASSID;
139 
140 /*E
141     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
142      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
143      that the input matrix is to be replaced with the converted matrix.
144 
145     Level: beginner
146 
147    Any additions/changes here MUST also be made in include/finclude/petscmat.h
148 
149 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
150 E*/
151 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;  /* create a fresh matrix, reuse one from a previous call, or skip producing it */
152 
153 /*E
154     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
155      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
156 
157     Level: beginner
158 
159 .seealso: MatGetSeqNonzerostructure()
160 E*/
161 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;  /* whether extracted submatrices carry numerical values or only the nonzero structure */
162 
163 extern PetscErrorCode  MatInitializePackage(const char[]);
164 
165 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
166 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
167 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
168 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
169 extern PetscErrorCode  MatSetType(Mat,const MatType);
170 extern PetscErrorCode  MatSetFromOptions(Mat);
171 extern PetscErrorCode  MatSetUpPreallocation(Mat);
172 extern PetscErrorCode  MatRegisterAll(const char[]);
173 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
174 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
175 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
176 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
178 
179 /*MC
180    MatRegisterDynamic - Adds a new matrix type
181 
182    Synopsis:
183    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
184 
185    Not Collective
186 
187    Input Parameters:
188 +  name - name of a new user-defined matrix type
189 .  path - path (either absolute or relative) of the library containing this solver
190 .  name_create - name of routine to create method context
191 -  routine_create - routine to create method context
192 
193    Notes:
194    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
195 
196    If dynamic libraries are used, then the fourth input argument (routine_create)
197    is ignored.
198 
199    Sample usage:
200 .vb
201    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
202                "MyMatCreate",MyMatCreate);
203 .ve
204 
205    Then, your solver can be chosen with the procedural interface via
206 $     MatSetType(Mat,"my_mat")
207    or at runtime via the option
208 $     -mat_type my_mat
209 
210    Level: advanced
211 
212    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
213          If your function is not being put into a shared library then use MatRegister() instead
214 
215 .keywords: Mat, register
216 
217 .seealso: MatRegisterAll(), MatRegisterDestroy()
218 
219 M*/
220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)  /* dynamic build: drop the function pointer; the routine is loaded from the library by name (see Notes above) */
222 #else
223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)  /* static build: register the creation routine's address directly */
224 #endif
225 
226 extern PetscBool  MatRegisterAllCalled;
227 extern PetscFList MatList;
228 extern PetscFList MatColoringList;
229 extern PetscFList MatPartitioningList;
230 
231 /*E
232     MatStructure - Indicates if the matrix has the same nonzero structure
233 
234     Level: beginner
235 
236    Any additions/changes here MUST also be made in include/finclude/petscmat.h
237 
238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
239 E*/
240 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure;  /* relationship between the nonzero patterns of two matrices, e.g. for MatCopy()/KSPSetOperators() */
241 
242 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
243 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
252 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
260 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
267 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
268 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
269 
270 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
278 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
293 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
294 
295 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
296 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
304 
305 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
320 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
321 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
322 
323 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
324 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
325 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
326 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
327 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
328 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
329 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
330 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
331 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
332 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
333 
334 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
335 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
338 
339 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
340 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
341 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
342 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
343 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
344 extern PetscErrorCode  MatCompositeMerge(Mat);
345 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
346 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;  /* how a MATCOMPOSITE combines its member matrices: as a sum or as a product (per the enumerator names; confirm in MatCompositeSetType() docs) */
347 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
348 
349 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
350 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
351 
352 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
353 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
354 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
355 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
356 
357 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
358 
359 extern PetscErrorCode  MatSetUp(Mat);
360 extern PetscErrorCode  MatDestroy(Mat*);
361 
362 extern PetscErrorCode  MatConjugate(Mat);
363 extern PetscErrorCode  MatRealPart(Mat);
364 extern PetscErrorCode  MatImaginaryPart(Mat);
365 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
366 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
367 
368 /* ------------------------------------------------------------*/
369 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
370 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
371 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
372 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
373 
374 /*S
375      MatStencil - Data structure (C struct) for storing information about a single row or
376         column of a matrix as index on an associated grid.
377 
378    Level: beginner
379 
380   Concepts: matrix; linear operator
381 
382 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
383 S*/
384 typedef struct {
385   PetscInt k,j,i,c;  /* grid indices (k,j,i) and component number c identifying one matrix row/column on the associated grid */
386 } MatStencil;
387 
388 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
389 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
390 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
391 
392 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
393 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
394 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
395 
396 /*E
397     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
398      to continue to add values to it
399 
400     Level: beginner
401 
402 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
403 E*/
404 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;  /* FLUSH: more values will still be added; FINAL: matrix ready for use. Values are explicit (1/0) — do not renumber */
405 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
406 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
407 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
408 
409 
410 
411 /*E
412     MatOption - Options that may be set for a matrix and its behavior or storage
413 
414     Level: beginner
415 
416    Any additions/changes here MUST also be made in include/finclude/petscmat.h
417 
418 .seealso: MatSetOption()
419 E*/
420 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
421               MAT_SYMMETRIC,
422               MAT_STRUCTURALLY_SYMMETRIC,
423               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
424               MAT_NEW_NONZERO_LOCATION_ERR,
425               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
426               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
427               MAT_USE_INODES,
428               MAT_HERMITIAN,
429               MAT_SYMMETRY_ETERNAL,
430               MAT_CHECK_COMPRESSED_ROW,
431               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
432               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
433               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
434               NUM_MAT_OPTIONS} MatOption;  /* NUM_MAT_OPTIONS counts the entries and must stay last; keep in sync with MatOptions[] and include/finclude/petscmat.h (see note above) */
435 extern const char *MatOptions[];
436 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
437 extern PetscErrorCode  MatGetType(Mat,const MatType*);
438 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
439 
440 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
441 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
442 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
443 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
444 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
445 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
446 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
447 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
448 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
449 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
450 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
451 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
452 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
453 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
454 
455 
456 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
457 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
458 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
459 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
460 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
461 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
462 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
463 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
464 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
465 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
466 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
467 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
468 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
469 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
470 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
471 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
472 
473 /*E
474     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
475   its numerical values copied over or just its nonzero structure.
476 
477     Level: beginner
478 
479    Any additions/changes here MUST also be made in include/finclude/petscmat.h
480 
481 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
482 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
483 $                               have several matrices with the same nonzero pattern.
484 
485 .seealso: MatDuplicate()
486 E*/
487 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;  /* duplicate structure only, structure plus values, or share the original's i/j arrays (which implies not copying values) */
488 
/* Conversion, duplication, copying, viewing and symmetry predicates */
extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode  MatView(Mat,PetscViewer);
extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
extern PetscErrorCode  MatLoad(Mat, PetscViewer);

/* Direct (read-only) access to the compressed row/column index (i,j) arrays */
extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
514 
/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
   (every field below is a PetscLogDouble, so the struct maps onto such an array)

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;
535 
/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_LOCAL      - values for the local part of the matrix only
$   MAT_GLOBAL_MAX - the maximum over all the local parts
$   MAT_GLOBAL_SUM - the sum over all the local parts (the entire parallel matrix)

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
/* Statistics, diagonal/row extraction, transposition, scaling, and equality tests */
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
567 
/* Norms, zeroing of entries/rows, ownership ranges, and submatrix extraction */
extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode  MatZeroEntries(Mat);
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);

extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);

extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Merging sequential matrices into parallel ones and fetching local pieces of parallel matrices */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
#if defined (PETSC_USE_CTABLE)
#include "petscctable.h"
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
615 
/* Matrix-matrix products (C=A*B, C=P'*A*P, C=A'*B), each split into symbolic/numeric phases */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local (per-process) numbering support: mappings, zeroing and insertion by local indices */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
656 
657 /*MC
658    MatSetValue - Set a single entry into a matrix.
659 
660    Not collective
661 
662    Input Parameters:
663 +  m - the matrix
664 .  row - the row location of the entry
665 .  col - the column location of the entry
666 .  value - the value to insert
667 -  mode - either INSERT_VALUES or ADD_VALUES
668 
669    Notes:
670    For efficiency one should use MatSetValues() and set several or many
671    values simultaneously if possible.
672 
673    Level: beginner
674 
675 .seealso: MatSetValues(), MatSetValueLocal()
676 M*/
677 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}
678 
679 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}
680 
681 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}
682 
/* Batched insertion of element matrices into a SEQAIJ matrix */
extern PetscErrorCode MatSeqAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt, PetscInt *, PetscScalar *);
684 
/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

   It also declares the hidden local variables __nrows, __ctmp, __rstart, __start, __end and
   _4_ierr that the other MatPreallocate macros rely on; the MPI_Scan() calls compute this
   process's starting global column (__start) and row (__rstart).

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
728 
/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

   Unlike MatPreallocateInitialize() it does not declare __start, since the symmetric
   counting macro MatPreallocateSymmetricSet() only needs __end and __rstart.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
770 
/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   The rows[] and cols[] arrays are OVERWRITTEN in place with the global indices.

   Must be used between MatPreallocateInitialize() and MatPreallocateFinalize(); it relies
   on the _4_ierr variable (and, via MatPreallocateSet(), the __ variables) declared there.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
812 
/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   The rows[] and cols[] arrays are OVERWRITTEN in place with the global indices.

   Must be used between MatPreallocateSymmetricInitialize() and MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
853 
/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row, PetscInt ncols, PetscInt *cols, PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateInitialize(), MatPreallocateSymmetricSet(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
896 
/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using global numbering of the rows and columns; only the upper triangular
       part (column >= row) is counted

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row, PetscInt ncols, PetscInt *cols, PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateSymmetricInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
936 
/*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
.  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
   It therefore must be used between MatPreallocateInitialize() and MatPreallocateFinalize(),
   which also provide the _4_ierr variable used for error checking (previously this macro
   used an undeclared ierr, which was not guaranteed to be in scope).

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {_4_ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(_4_ierr);} else {_4_ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(_4_ierr);}
969 
970 
/*MC
   MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it closes the { started in MatPreallocateInitialize();
   it also frees the dnz and onz arrays allocated there.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1000 
1001 
1002 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void *);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Creating sequential matrices directly from user-provided CSR-style arrays */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);

/* NOTE(review): presumably a sentinel passed in place of a preallocation count to skip allocation — confirm in the format implementations */
#define MAT_SKIP_ALLOCATION -4

/* Preallocation for the built-in matrix formats */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1049 /*
1050   These routines are not usually accessed directly, rather solving is
1051   done through the KSP and PC interfaces.
1052 */
1053 
/*E
    MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

   Level: beginner

   Cannot use const because the PC objects manipulate the string

.seealso: MatGetOrdering()
E*/
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Computing and registering fill-reducing orderings */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1080 
/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *sname,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type,a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1122 
/* Ordering registry state and reordering for nonzero diagonals */
extern PetscErrorCode  MatOrderingRegisterDestroy(void);
extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
extern PetscBool  MatOrderingRegisterAllCalled;
extern PetscFList MatOrderingList;

extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1129 
/*E
    MatFactorShiftType - Type of numeric shift applied during factorization to avoid zero pivots

   Level: beginner

E*/
1136 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1137 extern const char *MatFactorShiftTypes[];
1138 
1139 /*S
1140    MatFactorInfo - Data passed into the matrix factorization routines
1141 
1142    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1143 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1144 
1145    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1146 
1147       You can use MatFactorInfoInitialize() to set default values.
1148 
1149    Level: developer
1150 
1151 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1152           MatFactorInfoInitialize()
1153 
1154 S*/
/* Every field is a PetscReal (even the logically boolean/integer ones) so that
   from Fortran the whole struct can be treated as a double precision array of
   length MAT_FACTORINFO_SIZE (see the MatFactorInfo manual section above). */
typedef struct {
  PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal     usedt;          /* if nonzero, use drop-tolerance (dt) based factorization -- NOTE(review): appears to be a boolean flag; confirm */
  PetscReal     dt;             /* drop tolerance */
  PetscReal     dtcol;          /* tolerance for pivoting */
  PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal     levels;         /* ICC/ILU(levels) */
  PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                                   factorization may be faster if do not pivot */
  PetscReal     zeropivot;      /* pivot is called zero if less than this */
  PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value) */
  PetscReal     shiftamount;     /* how large the shift is */
} MatFactorInfo;
1169 
/* ---------------- Factorization and triangular-solve interface ----------------
   Typical usage: fill a MatFactorInfo with MatFactorInfoInitialize(), optionally
   adjust fields, run the Symbolic then Numeric phase (or the combined
   MatLUFactor()/MatCholeskyFactor()/... routines), then solve with MatSolve()
   and friends.  Users normally reach these through the PC types LU, ILU,
   CHOLESKY, and ICC rather than calling them directly. */
extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
/* NOTE(review): the three PetscInt outputs are presumably the counts of
   negative/zero/positive eigenvalues -- confirm their order against the manual page */
extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs); /* solve with several right-hand sides at once */

/* Mark a factored matrix as usable again as a regular (unfactored) matrix */
extern PetscErrorCode  MatSetUnfactored(Mat);
1191 
1192 /*E
1193     MatSORType - What type of (S)SOR to perform
1194 
1195     Level: beginner
1196 
1197    May be bitwise ORd together
1198 
1199    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1200 
1201    MatSORType may be bitwise ORd together, so do not change the numbers
1202 
1203 .seealso: MatSOR()
1204 E*/
typedef enum {
  SOR_FORWARD_SWEEP         = 1,   /* forward sweep over the whole matrix */
  SOR_BACKWARD_SWEEP        = 2,   /* backward sweep over the whole matrix */
  SOR_SYMMETRIC_SWEEP       = 3,   /* == SOR_FORWARD_SWEEP | SOR_BACKWARD_SWEEP */
  SOR_LOCAL_FORWARD_SWEEP   = 4,   /* forward sweep on the local (on-process) part only */
  SOR_LOCAL_BACKWARD_SWEEP  = 8,   /* backward sweep on the local part only */
  SOR_LOCAL_SYMMETRIC_SWEEP = 12,  /* == SOR_LOCAL_FORWARD_SWEEP | SOR_LOCAL_BACKWARD_SWEEP */
  SOR_ZERO_INITIAL_GUESS    = 16,  /* treat the initial guess as zero */
  SOR_EISENSTAT             = 32,  /* use the Eisenstat trick */
  SOR_APPLY_UPPER           = 64,  /* apply only the upper-triangular part */
  SOR_APPLY_LOWER           = 128  /* apply only the lower-triangular part */
} MatSORType;                       /* bitmask: values may be ORed together, so do not renumber */
1209 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1210 
1211 /*
1212     These routines are for efficiently computing Jacobians via finite differences.
1213 */
1214 
1215 /*E
1216     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1217        with an optional dynamic library name, for example
1218        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1219 
1220    Level: beginner
1221 
1222 .seealso: MatGetColoring()
1223 E*/
1224 #define MatColoringType char*
1225 #define MATCOLORINGNATURAL "natural"
1226 #define MATCOLORINGSL      "sl"
1227 #define MATCOLORINGLF      "lf"
1228 #define MATCOLORINGID      "id"
1229 
1230 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1231 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1232 
1233 /*MC
1234    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1235                                matrix package.
1236 
1237    Synopsis:
1238    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1239 
1240    Not Collective
1241 
1242    Input Parameters:
1243 +  sname - name of Coloring (for example MATCOLORINGSL)
1244 .  path - location of library where creation routine is
1245 .  name - name of function that creates the Coloring type, a string
1246 -  function - function pointer that creates the coloring
1247 
1248    Level: developer
1249 
1250    If dynamic libraries are used, then the fourth input argument (function)
1251    is ignored.
1252 
1253    Sample usage:
1254 .vb
1255    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1256                "MyColor",MyColor);
1257 .ve
1258 
   Then, your coloring routine can be chosen with the procedural interface via
1260 $     MatColoringSetType(part,"my_color")
1261    or at runtime via the option
1262 $     -mat_coloring_type my_color
1263 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1265 
1266 .keywords: matrix, Coloring, register
1267 
1268 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1269 M*/
1270 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1271 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1272 #else
1273 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1274 #endif
1275 
1276 extern PetscBool  MatColoringRegisterAllCalled;
1277 
1278 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1279 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1280 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1281 
1282 /*S
1283      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1284         and coloring
1285 
1286    Level: beginner
1287 
1288   Concepts: coloring, sparse Jacobian, finite differences
1289 
1290 .seealso:  MatFDColoringCreate()
1291 S*/
typedef struct _p_MatFDColoring* MatFDColoring;

/* Create a finite-difference coloring context from a matrix and an ISColoring */
extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
/* Set/get the function whose Jacobian will be approximated; the function
   pointer is stored untyped (PetscErrorCode (*)(void)) and cast at the call site */
extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
/* Compute the Jacobian approximation at the given base vector */
extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
/* Variant used by the TS (time-stepping) component; the PetscReal is presumably the time -- TODO confirm */
extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1305 /*
1306     These routines are for partitioning matrices: currently used only
1307   for adjacency matrix, MatCreateMPIAdj().
1308 */
1309 
1310 /*S
1311      MatPartitioning - Object for managing the partitioning of a matrix or graph
1312 
1313    Level: beginner
1314 
1315   Concepts: partitioning
1316 
1317 .seealso:  MatPartitioningCreate(), MatPartitioningType
1318 S*/
1319 typedef struct _p_MatPartitioning* MatPartitioning;
1320 
1321 /*E
1322     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1323        with an optional dynamic library name, for example
1324        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1325 
1326    Level: beginner
1327 
1328 .seealso: MatPartitioningCreate(), MatPartitioning
1329 E*/
#define MatPartitioningType char*
/* Names of the built-in partitioning packages/strategies */
#define MATPARTITIONINGCURRENT  "current"
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGJOSTLE   "jostle"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGSCOTCH   "scotch"


extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
/* Number of parts to partition into */
extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
/* The (adjacency) matrix describing the graph to be partitioned */
extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
/* Run the partitioner; the IS output gives the part assignment */
extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);

/* Register a new partitioning type; normally called via MatPartitioningRegisterDynamic() (see below) */
extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1350 
1351 /*MC
1352    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1353    matrix package.
1354 
1355    Synopsis:
1356    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1357 
1358    Not Collective
1359 
1360    Input Parameters:
1361 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1362 .  path - location of library where creation routine is
1363 .  name - name of function that creates the partitioning type, a string
1364 -  function - function pointer that creates the partitioning type
1365 
1366    Level: developer
1367 
1368    If dynamic libraries are used, then the fourth input argument (function)
1369    is ignored.
1370 
1371    Sample usage:
1372 .vb
1373    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1374                "MyPartCreate",MyPartCreate);
1375 .ve
1376 
1377    Then, your partitioner can be chosen with the procedural interface via
1378 $     MatPartitioningSetType(part,"my_part")
1379    or at runtime via the option
1380 $     -mat_partitioning_type my_part
1381 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1383 
1384 .keywords: matrix, partitioning, register
1385 
1386 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1387 M*/
1388 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1389 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1390 #else
1391 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1392 #endif
1393 
1394 extern PetscBool  MatPartitioningRegisterAllCalled;
1395 
1396 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1397 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1398 
1399 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1400 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1401 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1402 
1403 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1404 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1405 
1406 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1407 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1408 
/* Global partitioning methods offered by the Chaco package */
typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
/* Local refinement methods offered by Chaco */
typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
/* Eigensolvers available to Chaco's spectral methods */
typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1418 
/* Option strings for the Party partitioning package: global methods, passed
   to MatPartitioningPartySetGlobal() */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
/* Local refinement methods, passed to MatPartitioningPartySetLocal() */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1435 
/* Global partitioning methods offered by the Scotch package */
typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
/* Local refinement methods offered by Scotch */
typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);

/* Build vertex/cell adjacency graphs from a mesh description (used with MatCreateMPIAdj()) */
extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1449 
1450 /*
1451     If you add entries here you must also add them to finclude/petscmat.h
1452 */
/* Operation slots in a Mat's function table; used with MatHasOperation(),
   MatShellSetOperation(), and MatShellGetOperation().  The numeric values are
   part of the public (and Fortran) interface: any addition or change here MUST
   also be made in finclude/petscmat.h, and existing entries must never be
   renumbered. */
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               /* 124 is currently unused */
               MATOP_GETCOLUMNNORMS=125,
               /* 126 and 127 are currently unused */
               MATOP_GET_SUBMATRICES_PARALLEL=128
             } MatOperation;
1580 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1581 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1582 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1583 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1584 
1585 /*
1586    Codes for matrices stored on disk. By default they are
1587    stored in a universal format. By changing the format with
1588    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1589    be stored in a way natural for the matrix, for example dense matrices
1590    would be stored as dense. Matrices stored this way may only be
1591    read into matrices of the same type.
1592 */
1593 #define MATRIX_BINARY_FORMAT_DENSE -1
1594 
1595 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1596 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1597 
1598 /*S
1599      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector against a subspace
1601 
1602    Level: advanced
1603 
1604   Concepts: matrix; linear operator, null space
1605 
1606   Users manual sections:
1607 .   sec_singular
1608 
1609 .seealso:  MatNullSpaceCreate()
1610 S*/
typedef struct _p_MatNullSpace* MatNullSpace;

/* Create a null space from a set of vectors; the PetscBool presumably flags
   whether the constant vector is included -- NOTE(review): confirm against the manual page */
extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
/* Supply a callback that removes the null-space component from a vector */
extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
/* Associate the null space with a matrix */
extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
/* Check that the claimed null space really is annihilated by the matrix */
extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1620 
1621 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1622 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1623 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1624 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1625 
1626 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1627 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1628 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1629 
1630 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1631 
1632 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1633 
1634 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1635 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1636 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1637 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1638 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1639 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1640 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1641 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1642 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1643 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1644 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1645 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1646 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1647 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1648 
1649 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1651               Jacobian vector products
1652 
1653     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1654 
1655            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1656 
1657     Level: developer
1658 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1660 S*/
1661 typedef struct _p_MatMFFD* MatMFFD;
1662 
1663 /*E
1664     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1665 
1666    Level: beginner
1667 
1668 .seealso: MatMFFDSetType(), MatMFFDRegister()
1669 E*/
1670 #define MatMFFDType char*
1671 #define MATMFFD_DS  "ds"
1672 #define MATMFFD_WP  "wp"
1673 
1674 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1675 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1676 
1677 /*MC
1678    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1679 
1680    Synopsis:
1681    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1682 
1683    Not Collective
1684 
1685    Input Parameters:
1686 +  name_solver - name of a new user-defined compute-h module
1687 .  path - path (either absolute or relative) the library containing this solver
1688 .  name_create - name of routine to create method context
1689 -  routine_create - routine to create method context
1690 
1691    Level: developer
1692 
1693    Notes:
1694    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1695 
1696    If dynamic libraries are used, then the fourth input argument (routine_create)
1697    is ignored.
1698 
1699    Sample usage:
1700 .vb
1701    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1702                "MyHCreate",MyHCreate);
1703 .ve
1704 
1705    Then, your solver can be chosen with the procedural interface via
1706 $     MatMFFDSetType(mfctx,"my_h")
1707    or at runtime via the option
1708 $     -snes_mf_type my_h
1709 
1710 .keywords: MatMFFD, register
1711 
1712 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1713 M*/
1714 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1715 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1716 #else
1717 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1718 #endif
1719 
1720 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1721 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1722 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1723 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1724 
1725 
1726 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1727 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1728 
1729 /*
1730    PETSc interface to MUMPS
1731 */
1732 #ifdef PETSC_HAVE_MUMPS
1733 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1734 #endif
1735 
1736 /*
1737    PETSc interface to SUPERLU
1738 */
1739 #ifdef PETSC_HAVE_SUPERLU
1740 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1741 #endif
1742 
1743 #if defined(PETSC_HAVE_CUSP)
1744 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1745 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1746 #endif
1747 
1748 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1749 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1750 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1751 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1752 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1753 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1754 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1755 
1756 PETSC_EXTERN_CXX_END
1757 #endif
1758