xref: /petsc/include/petscmat.h (revision ff218e97a57ed641f3ebc93f697e38ef0f3aa217)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQPTHREADAIJ     "seqpthreadaij"
38 #define MATMPIAIJ          "mpiaij"
39 #define MATAIJCRL          "aijcrl"
40 #define MATSEQAIJCRL       "seqaijcrl"
41 #define MATMPIAIJCRL       "mpiaijcrl"
42 #define MATAIJCUSP         "aijcusp"
43 #define MATSEQAIJCUSP      "seqaijcusp"
44 #define MATMPIAIJCUSP      "mpiaijcusp"
45 #define MATAIJPERM         "aijperm"
46 #define MATSEQAIJPERM      "seqaijperm"
47 #define MATMPIAIJPERM      "mpiaijperm"
48 #define MATSHELL           "shell"
49 #define MATDENSE           "dense"
50 #define MATSEQDENSE        "seqdense"
51 #define MATMPIDENSE        "mpidense"
52 #define MATBAIJ            "baij"
53 #define MATSEQBAIJ         "seqbaij"
54 #define MATMPIBAIJ         "mpibaij"
55 #define MATMPIADJ          "mpiadj"
56 #define MATSBAIJ           "sbaij"
57 #define MATSEQSBAIJ        "seqsbaij"
58 #define MATMPISBAIJ        "mpisbaij"
59 
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 
67 #define MATDAAD            "daad"
68 #define MATMFFD            "mffd"
69 #define MATNORMAL          "normal"
70 #define MATLRC             "lrc"
71 #define MATSCATTER         "scatter"
72 #define MATBLOCKMAT        "blockmat"
73 #define MATCOMPOSITE       "composite"
74 #define MATFFT             "fft"
75 #define MATFFTW            "fftw"
76 #define MATSEQCUFFT        "seqcufft"
77 #define MATTRANSPOSEMAT    "transpose"
78 #define MATSCHURCOMPLEMENT "schurcomplement"
79 #define MATPYTHON          "python"
80 #define MATHYPRESTRUCT     "hyprestruct"
81 #define MATHYPRESSTRUCT    "hypresstruct"
82 #define MATSUBMATRIX       "submatrix"
83 #define MATLOCALREF        "localref"
84 #define MATNEST            "nest"
85 
86 /*E
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 E*/
97 #define MatSolverPackage char*
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERDSCPACK      "dscpack"
108 #define MATSOLVERMATLAB       "matlab"
109 #define MATSOLVERPETSC        "petsc"
110 #define MATSOLVERPLAPACK      "plapack"
111 #define MATSOLVERBAS          "bas"
112 
113 #define MATSOLVERBSTRM        "bstrm"
114 #define MATSOLVERSBSTRM       "sbstrm"
115 
116 /*E
117     MatFactorType - indicates what type of factorization is requested
118 
119     Level: beginner
120 
121    Any additions/changes here MUST also be made in include/finclude/petscmat.h
122 
123 .seealso: MatSolverPackage, MatGetFactor()
124 E*/
125 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
126 extern const char *const MatFactorTypes[];
127 
128 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
129 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
130 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
131 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
132 
133 /* Logging support */
134 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
135 extern PetscClassId  MAT_CLASSID;
136 extern PetscClassId  MAT_FDCOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_NULLSPACE_CLASSID;
139 extern PetscClassId  MATMFFD_CLASSID;
140 
141 /*E
142     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144      that the input matrix is to be replaced with the converted matrix.
145 
146     Level: beginner
147 
148    Any additions/changes here MUST also be made in include/finclude/petscmat.h
149 
150 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151 E*/
152 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
153 
154 /*E
155     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156      include the matrix values. Currently it is only used by MatGetSeqNonzerostructure().
157 
158     Level: beginner
159 
160 .seealso: MatGetSeqNonzerostructure()
161 E*/
162 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
163 
164 extern PetscErrorCode  MatInitializePackage(const char[]);
165 
166 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
167 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
169 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
170 extern PetscErrorCode  MatSetType(Mat,const MatType);
171 extern PetscErrorCode  MatSetFromOptions(Mat);
172 extern PetscErrorCode  MatSetUpPreallocation(Mat);
173 extern PetscErrorCode  MatRegisterAll(const char[]);
174 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
175 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
176 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
179 
180 /*MC
181    MatRegisterDynamic - Adds a new matrix type
182 
183    Synopsis:
184    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
185 
186    Not Collective
187 
188    Input Parameters:
189 +  name - name of a new user-defined matrix type
190 .  path - path (either absolute or relative) to the library containing this solver
191 .  name_create - name of routine to create method context
192 -  routine_create - routine to create method context
193 
194    Notes:
195    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
196 
197    If dynamic libraries are used, then the fourth input argument (routine_create)
198    is ignored.
199 
200    Sample usage:
201 .vb
202    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
203                "MyMatCreate",MyMatCreate);
204 .ve
205 
206    Then, your solver can be chosen with the procedural interface via
207 $     MatSetType(Mat,"my_mat")
208    or at runtime via the option
209 $     -mat_type my_mat
210 
211    Level: advanced
212 
213    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214          If your function is not being put into a shared library then use MatRegister() instead
215 
216 .keywords: Mat, register
217 
218 .seealso: MatRegisterAll(), MatRegisterDestroy()
219 
220 M*/
221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0)
223 #else
224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d)
225 #endif
226 
227 extern PetscBool  MatRegisterAllCalled;
228 extern PetscFList MatList;
229 extern PetscFList MatColoringList;
230 extern PetscFList MatPartitioningList;
231 
232 /*E
233     MatStructure - Indicates if the matrix has the same nonzero structure
234 
235     Level: beginner
236 
237    Any additions/changes here MUST also be made in include/finclude/petscmat.h
238 
239 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
240 E*/
241 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
242 
243 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
253 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
267 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
268 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
269 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
270 
271 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
279 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
293 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
294 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
295 
296 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
297 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
305 
306 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
320 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
321 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
322 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
323 
324 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
325 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
328 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
329 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
330 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
331 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
332 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 
340 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
341 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
342 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
343 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
344 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
345 extern PetscErrorCode  MatCompositeMerge(Mat);
346 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
347 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
348 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
349 
350 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
351 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
352 
353 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
354 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
355 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
356 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
357 
358 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
359 
360 extern PetscErrorCode  MatSetUp(Mat);
361 extern PetscErrorCode  MatDestroy(Mat*);
362 
363 extern PetscErrorCode  MatConjugate(Mat);
364 extern PetscErrorCode  MatRealPart(Mat);
365 extern PetscErrorCode  MatImaginaryPart(Mat);
366 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
367 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
368 
369 /* ------------------------------------------------------------*/
370 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
371 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
373 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
374 
375 /*S
376      MatStencil - Data structure (C struct) for storing information about a single row or
377         column of a matrix as index on an associated grid.
378 
379    Level: beginner
380 
381   Concepts: matrix; linear operator
382 
383 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
384 S*/
385 typedef struct {
386   PetscInt k,j,i,c;   /* k,j,i: logical grid coordinates of the row/column; c: component (degree-of-freedom) index -- NOTE(review): exact dimension ordering assumed per MatSetValuesStencil(), confirm */
387 } MatStencil;
388 
389 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
390 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
391 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
392 
393 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
394 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
395 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
396 
397 /*E
398     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
399      to continue to add values to it
400 
401     Level: beginner
402 
403 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
404 E*/
405 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
406 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
407 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
408 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
409 
410 
411 
412 /*E
413     MatOption - Options that may be set for a matrix and its behavior or storage
414 
415     Level: beginner
416 
417    Any additions/changes here MUST also be made in include/finclude/petscmat.h
418 
419 .seealso: MatSetOption()
420 E*/
421 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
422               MAT_SYMMETRIC,
423               MAT_STRUCTURALLY_SYMMETRIC,
424               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
425               MAT_NEW_NONZERO_LOCATION_ERR,
426               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
427               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
428               MAT_USE_INODES,
429               MAT_HERMITIAN,
430               MAT_SYMMETRY_ETERNAL,
431               MAT_CHECK_COMPRESSED_ROW,
432               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
433               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
434               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
435               NUM_MAT_OPTIONS} MatOption;
436 extern const char *MatOptions[];
437 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
438 extern PetscErrorCode  MatGetType(Mat,const MatType*);
439 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
440 
441 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
442 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
443 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
444 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
445 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
446 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
447 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
449 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
450 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
451 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
452 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
453 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
454 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
455 
456 
457 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
458 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
459 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
460 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
461 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
462 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
463 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
464 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
465 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
466 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
467 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
468 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
469 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
470 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
471 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
472 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
473 
474 /*E
475     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
476   its numerical values copied over or just its nonzero structure.
477 
478     Level: beginner
479 
480    Any additions/changes here MUST also be made in include/finclude/petscmat.h
481 
482 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
483 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
484 $                               have several matrices with the same nonzero pattern.
485 
486 .seealso: MatDuplicate()
487 E*/
488 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
489 
/* Conversion between formats, duplication, copying and viewing */
extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode  MatView(Mat,PetscViewer);
/* Symmetry tests: the PetscReal argument (where present) is a tolerance;
   the polymorphic shorthands pass 0 for it */
extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
extern PetscErrorCode  MatLoad(Mat, PetscViewer);

/* Direct access to the compressed row/column index (IJ) structure */
extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
515 
/*S
     MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

   Level: intermediate

  Concepts: matrix^nonzero information

.seealso:  MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;

/*E
    MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
/* Row-wise maxima/minima (plain and in absolute value) and row sums */
extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode  MatGetRowSum(Mat,Vec);
extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
/* Randomized equality checks: the PetscInt is the number of trial vectors used */
extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
568 
extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
/* Zeroing entries and (optionally) whole rows/columns; see the man pages for
   the role of the PetscScalar (diagonal entry) and Vec arguments */
extern PetscErrorCode  MatZeroEntries(Mat);
extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);

extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);

/* Global/local sizes and parallel row (and column) ownership layout */
extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);

/* Extraction of submatrices (single and batched) */
extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);

/* Merging sequential matrices into parallel ones and related MPIAIJ helpers */
extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
#if defined (PETSC_USE_CTABLE)
#include "petscctable.h"
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
614 
extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);

/* C = A*B products: combined interface plus separate symbolic/numeric phases */
extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);

/* C = P^T*A*P products (Galerkin triple products) */
extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);

/* Transpose-times-matrix products */
extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);

extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode  MatScale(Mat,PetscScalar);
extern PetscErrorCode  MatShift(Mat,PetscScalar);

/* Local (per-process) numbering support: mappings plus local-index variants
   of the zeroing and value-setting routines */
extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

/* Grid-transfer style operations and miscellaneous queries */
extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
657 
/*MC
   MatSetValue - Set a single entry into a matrix.

   Not collective

   Input Parameters:
+  m - the matrix
.  row - the row location of the entry
.  col - the column location of the entry
.  value - the value to insert
-  mode - either INSERT_VALUES or ADD_VALUES

   Notes:
   For efficiency one should use MatSetValues() and set several or many
   values simultaneously if possible.

   Level: beginner

.seealso: MatSetValues(), MatSetValueLocal()
M*/
PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}

/* MatGetValue - Gets a single entry from a matrix; thin wrapper around MatGetValues() */
PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

/* MatSetValueLocal - Sets a single entry using the local numbering; thin wrapper around MatSetValuesLocal() */
PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}

/* Batched insertion for AIJ matrices */
extern PetscErrorCode MatSeqAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
extern PetscErrorCode MatMPIAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
686 
/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
730 
/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
772 
/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  rmap - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   The rows[] and cols[] arrays are overwritten in place with the corresponding global indices.

   This is a MACRO that must be used between MatPreallocateInitialize() and MatPreallocateFinalize()
   because it uses variables (_4_ierr) declared by MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
814 
/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows,PetscInt *rows,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   The rows[] and cols[] arrays are overwritten in place with the corresponding global indices.

   This is a MACRO that must be used between MatPreallocateSymmetricInitialize() and MatPreallocateFinalize()
   because it uses variables (_4_ierr) declared by the Initialize macro.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}
855 
/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted; the row is given in the global numbering and must be owned by this process

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt nc,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  nc - the number of columns indicated
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}
898 
/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted; only entries in the upper triangle (column >= row) are counted

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt nc,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  nc - the number of columns indicated
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}
938 
/*MC
   MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
.  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines;
   note it also relies on a variable named ierr being in scope at the point of use.

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
971 
972 
/*MC
   MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().

  Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1002 
1003 
1004 
/* Routines unique to particular data structures */
extern PetscErrorCode  MatShellGetContext(Mat,void *);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

/* Creating sequential matrices directly from user-provided CSR-style arrays */
extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);

/* sentinel value understood by the preallocation routines: skip the allocation step */
#define MAT_SKIP_ALLOCATION -4

/* Per-format preallocation interfaces (nz-per-row arrays and CSR variants) */
extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);

extern PetscErrorCode  MatStoreValues(Mat);
extern PetscErrorCode  MatRetrieveValues(Mat);

extern PetscErrorCode  MatDAADSetCtx(Mat,void*);

extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
/*
  These routines are not usually accessed directly, rather solving is
  done through the KSP and PC interfaces.
*/

/*E
    MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

   Level: beginner

   Cannot use const because the PC objects manipulate the string

.seealso: MatGetOrdering()
E*/
#define MatOrderingType char*
#define MATORDERINGNATURAL     "natural"
#define MATORDERINGND          "nd"
#define MATORDERING1WD         "1wd"
#define MATORDERINGRCM         "rcm"
#define MATORDERINGQMD         "qmd"
#define MATORDERINGROWLENGTH   "rowlength"
#define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD     "dsc_mmd"
#define MATORDERINGDSC_MDF     "dsc_mdf"
#define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */

/* Compute a reordering (row and column permutation ISs) of a matrix */
extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1082 
/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type,a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif
1124 
1125 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1126 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1127 extern PetscBool  MatOrderingRegisterAllCalled;
1128 extern PetscFList MatOrderingList;
1129 
1130 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1131 
1132 /*S
1133     MatFactorShiftType - Numeric Shift.
1134 
1135    Level: beginner
1136 
1137 S*/
1138 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
1139 extern const char *MatFactorShiftTypes[];
1140 
1141 /*S
1142    MatFactorInfo - Data passed into the matrix factorization routines
1143 
1144    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1145 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1146 
1147    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1148 
1149       You can use MatFactorInfoInitialize() to set default values.
1150 
1151    Level: developer
1152 
1153 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1154           MatFactorInfoInitialize()
1155 
1156 S*/
1157 typedef struct {
1158   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1159   PetscReal     usedt;
1160   PetscReal     dt;             /* drop tolerance */
1161   PetscReal     dtcol;          /* tolerance for pivoting */
1162   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1163   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1164   PetscReal     levels;         /* ICC/ILU(levels) */
1165   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1166                                    factorization may be faster if do not pivot */
1167   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1168   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
1169   PetscReal     shiftamount;     /* how large the shift is */
1170 } MatFactorInfo;
1171 
1172 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1173 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1174 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1175 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1176 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1177 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1178 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1179 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1180 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1181 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1182 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1183 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1184 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1185 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1186 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1187 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1188 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1189 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1190 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1191 
1192 extern PetscErrorCode  MatSetUnfactored(Mat);
1193 
1194 /*E
1195     MatSORType - What type of (S)SOR to perform
1196 
1197     Level: beginner
1198 
1199    May be bitwise ORd together
1200 
1201    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1202 
1203    MatSORType may be bitwise ORd together, so do not change the numbers
1204 
1205 .seealso: MatSOR()
1206 E*/
1207 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
1208               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1209               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
1210               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
1211 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1212 
1213 /*
1214     These routines are for efficiently computing Jacobians via finite differences.
1215 */
1216 
1217 /*E
1218     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1219        with an optional dynamic library name, for example
1220        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1221 
1222    Level: beginner
1223 
1224 .seealso: MatGetColoring()
1225 E*/
1226 #define MatColoringType char*
1227 #define MATCOLORINGNATURAL "natural"
1228 #define MATCOLORINGSL      "sl"
1229 #define MATCOLORINGLF      "lf"
1230 #define MATCOLORINGID      "id"
1231 
1232 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1233 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1234 
1235 /*MC
1236    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1237                                matrix package.
1238 
1239    Synopsis:
1240    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1241 
1242    Not Collective
1243 
1244    Input Parameters:
1245 +  sname - name of Coloring (for example MATCOLORINGSL)
1246 .  path - location of library where creation routine is
1247 .  name - name of function that creates the Coloring type, a string
1248 -  function - function pointer that creates the coloring
1249 
1250    Level: developer
1251 
1252    If dynamic libraries are used, then the fourth input argument (function)
1253    is ignored.
1254 
1255    Sample usage:
1256 .vb
1257    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1258                "MyColor",MyColor);
1259 .ve
1260 
   Then, your coloring routine can be chosen with the procedural interface via
1262 $     MatColoringSetType(part,"my_color")
1263    or at runtime via the option
1264 $     -mat_coloring_type my_color
1265 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1267 
1268 .keywords: matrix, Coloring, register
1269 
1270 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1271 M*/
1272 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1273 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
1274 #else
1275 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
1276 #endif
1277 
1278 extern PetscBool  MatColoringRegisterAllCalled;
1279 
1280 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1281 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1282 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1283 
1284 /*S
1285      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1286         and coloring
1287 
1288    Level: beginner
1289 
1290   Concepts: coloring, sparse Jacobian, finite differences
1291 
1292 .seealso:  MatFDColoringCreate()
1293 S*/
1294 typedef struct _p_MatFDColoring* MatFDColoring;
1295 
1296 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1297 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1298 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1299 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1300 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1301 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1302 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1303 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1304 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1305 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1306 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1307 /*
1308     These routines are for partitioning matrices: currently used only
1309   for adjacency matrix, MatCreateMPIAdj().
1310 */
1311 
1312 /*S
1313      MatPartitioning - Object for managing the partitioning of a matrix or graph
1314 
1315    Level: beginner
1316 
1317   Concepts: partitioning
1318 
1319 .seealso:  MatPartitioningCreate(), MatPartitioningType
1320 S*/
1321 typedef struct _p_MatPartitioning* MatPartitioning;
1322 
1323 /*E
1324     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1325        with an optional dynamic library name, for example
1326        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1327 
1328    Level: beginner
1329 
1330 .seealso: MatPartitioningCreate(), MatPartitioning
1331 E*/
1332 #define MatPartitioningType char*
1333 #define MATPARTITIONINGCURRENT  "current"
1334 #define MATPARTITIONINGSQUARE   "square"
1335 #define MATPARTITIONINGPARMETIS "parmetis"
1336 #define MATPARTITIONINGCHACO    "chaco"
1337 #define MATPARTITIONINGJOSTLE   "jostle"
1338 #define MATPARTITIONINGPARTY    "party"
1339 #define MATPARTITIONINGSCOTCH   "scotch"
1340 
1341 
1342 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1343 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1344 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1345 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1346 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1347 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1348 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1349 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1350 
1351 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1352 
1353 /*MC
1354    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1355    matrix package.
1356 
1357    Synopsis:
1358    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1359 
1360    Not Collective
1361 
1362    Input Parameters:
1363 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1364 .  path - location of library where creation routine is
1365 .  name - name of function that creates the partitioning type, a string
1366 -  function - function pointer that creates the partitioning type
1367 
1368    Level: developer
1369 
1370    If dynamic libraries are used, then the fourth input argument (function)
1371    is ignored.
1372 
1373    Sample usage:
1374 .vb
1375    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1376                "MyPartCreate",MyPartCreate);
1377 .ve
1378 
1379    Then, your partitioner can be chosen with the procedural interface via
1380 $     MatPartitioningSetType(part,"my_part")
1381    or at runtime via the option
1382 $     -mat_partitioning_type my_part
1383 
   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1385 
1386 .keywords: matrix, partitioning, register
1387 
1388 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1389 M*/
1390 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1391 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
1392 #else
1393 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
1394 #endif
1395 
1396 extern PetscBool  MatPartitioningRegisterAllCalled;
1397 
1398 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1399 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1400 
1401 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1402 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1403 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1404 
1405 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1406 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1407 
1408 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1409 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1410 
1411 typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType;
1412 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
1413 typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType;
1414 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
1415 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1416 typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType;
1417 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1418 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
1419 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1420 
1421 #define MP_PARTY_OPT "opt"
1422 #define MP_PARTY_LIN "lin"
1423 #define MP_PARTY_SCA "sca"
1424 #define MP_PARTY_RAN "ran"
1425 #define MP_PARTY_GBF "gbf"
1426 #define MP_PARTY_GCF "gcf"
1427 #define MP_PARTY_BUB "bub"
1428 #define MP_PARTY_DEF "def"
1429 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1430 #define MP_PARTY_HELPFUL_SETS "hs"
1431 #define MP_PARTY_KERNIGHAN_LIN "kl"
1432 #define MP_PARTY_NONE "no"
1433 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1434 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1435 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1436 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1437 
1438 typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
1439 extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
1440 extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
1441 extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
1442 extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
1443 extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
1444 typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
1445 extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
1446 extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
1447 extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1448 
1449 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1450 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1451 
1452 /*
1453     If you add entries here you must also add them to finclude/petscmat.h
1454 */
1455 typedef enum { MATOP_SET_VALUES=0,
1456                MATOP_GET_ROW=1,
1457                MATOP_RESTORE_ROW=2,
1458                MATOP_MULT=3,
1459                MATOP_MULT_ADD=4,
1460                MATOP_MULT_TRANSPOSE=5,
1461                MATOP_MULT_TRANSPOSE_ADD=6,
1462                MATOP_SOLVE=7,
1463                MATOP_SOLVE_ADD=8,
1464                MATOP_SOLVE_TRANSPOSE=9,
1465                MATOP_SOLVE_TRANSPOSE_ADD=10,
1466                MATOP_LUFACTOR=11,
1467                MATOP_CHOLESKYFACTOR=12,
1468                MATOP_SOR=13,
1469                MATOP_TRANSPOSE=14,
1470                MATOP_GETINFO=15,
1471                MATOP_EQUAL=16,
1472                MATOP_GET_DIAGONAL=17,
1473                MATOP_DIAGONAL_SCALE=18,
1474                MATOP_NORM=19,
1475                MATOP_ASSEMBLY_BEGIN=20,
1476                MATOP_ASSEMBLY_END=21,
1477                MATOP_SET_OPTION=22,
1478                MATOP_ZERO_ENTRIES=23,
1479                MATOP_ZERO_ROWS=24,
1480                MATOP_LUFACTOR_SYMBOLIC=25,
1481                MATOP_LUFACTOR_NUMERIC=26,
1482                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1483                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1484                MATOP_SETUP_PREALLOCATION=29,
1485                MATOP_ILUFACTOR_SYMBOLIC=30,
1486                MATOP_ICCFACTOR_SYMBOLIC=31,
1487                MATOP_GET_ARRAY=32,
1488                MATOP_RESTORE_ARRAY=33,
1489                MATOP_DUPLICATE=34,
1490                MATOP_FORWARD_SOLVE=35,
1491                MATOP_BACKWARD_SOLVE=36,
1492                MATOP_ILUFACTOR=37,
1493                MATOP_ICCFACTOR=38,
1494                MATOP_AXPY=39,
1495                MATOP_GET_SUBMATRICES=40,
1496                MATOP_INCREASE_OVERLAP=41,
1497                MATOP_GET_VALUES=42,
1498                MATOP_COPY=43,
1499                MATOP_GET_ROW_MAX=44,
1500                MATOP_SCALE=45,
1501                MATOP_SHIFT=46,
1502                MATOP_DIAGONAL_SET=47,
1503                MATOP_ILUDT_FACTOR=48,
1504                MATOP_SET_BLOCK_SIZE=49,
1505                MATOP_GET_ROW_IJ=50,
1506                MATOP_RESTORE_ROW_IJ=51,
1507                MATOP_GET_COLUMN_IJ=52,
1508                MATOP_RESTORE_COLUMN_IJ=53,
1509                MATOP_FDCOLORING_CREATE=54,
1510                MATOP_COLORING_PATCH=55,
1511                MATOP_SET_UNFACTORED=56,
1512                MATOP_PERMUTE=57,
1513                MATOP_SET_VALUES_BLOCKED=58,
1514                MATOP_GET_SUBMATRIX=59,
1515                MATOP_DESTROY=60,
1516                MATOP_VIEW=61,
1517                MATOP_CONVERT_FROM=62,
1518                MATOP_USE_SCALED_FORM=63,
1519                MATOP_SCALE_SYSTEM=64,
1520                MATOP_UNSCALE_SYSTEM=65,
1521                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1522                MATOP_SET_VALUES_LOCAL=67,
1523                MATOP_ZERO_ROWS_LOCAL=68,
1524                MATOP_GET_ROW_MAX_ABS=69,
1525                MATOP_GET_ROW_MIN_ABS=70,
1526                MATOP_CONVERT=71,
1527                MATOP_SET_COLORING=72,
1528                MATOP_SET_VALUES_ADIC=73,
1529                MATOP_SET_VALUES_ADIFOR=74,
1530                MATOP_FD_COLORING_APPLY=75,
1531                MATOP_SET_FROM_OPTIONS=76,
1532                MATOP_MULT_CON=77,
1533                MATOP_MULT_TRANSPOSE_CON=78,
1534                MATOP_PERMUTE_SPARSIFY=79,
1535                MATOP_MULT_MULTIPLE=80,
1536                MATOP_SOLVE_MULTIPLE=81,
1537                MATOP_GET_INERTIA=82,
1538                MATOP_LOAD=83,
1539                MATOP_IS_SYMMETRIC=84,
1540                MATOP_IS_HERMITIAN=85,
1541                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1542                MATOP_DUMMY=87,
1543                MATOP_GET_VECS=88,
1544                MATOP_MAT_MULT=89,
1545                MATOP_MAT_MULT_SYMBOLIC=90,
1546                MATOP_MAT_MULT_NUMERIC=91,
1547                MATOP_PTAP=92,
1548                MATOP_PTAP_SYMBOLIC=93,
1549                MATOP_PTAP_NUMERIC=94,
1550                MATOP_MAT_MULTTRANSPOSE=95,
1551                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1552                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1553                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1554                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1555                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1556                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1557                MATOP_CONJUGATE=102,
1558                MATOP_SET_SIZES=103,
1559                MATOP_SET_VALUES_ROW=104,
1560                MATOP_REAL_PART=105,
1561                MATOP_IMAG_PART=106,
1562                MATOP_GET_ROW_UTRIANGULAR=107,
1563                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1564                MATOP_MATSOLVE=109,
1565                MATOP_GET_REDUNDANTMATRIX=110,
1566                MATOP_GET_ROW_MIN=111,
1567                MATOP_GET_COLUMN_VEC=112,
1568                MATOP_MISSING_DIAGONAL=113,
1569                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1570                MATOP_CREATE=115,
1571                MATOP_GET_GHOSTS=116,
1572                MATOP_GET_LOCALSUBMATRIX=117,
1573                MATOP_RESTORE_LOCALSUBMATRIX=118,
1574                MATOP_MULT_DIAGONAL_BLOCK=119,
1575                MATOP_HERMITIANTRANSPOSE=120,
1576                MATOP_MULTHERMITIANTRANSPOSE=121,
1577                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1578                MATOP_GETMULTIPROCBLOCK=123,
1579                MATOP_GETCOLUMNNORMS=125,
1580 	       MATOP_GET_SUBMATRICES_PARALLEL=128
1581              } MatOperation;
1582 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1583 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1584 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1585 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1586 
1587 /*
1588    Codes for matrices stored on disk. By default they are
1589    stored in a universal format. By changing the format with
1590    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1591    be stored in a way natural for the matrix, for example dense matrices
1592    would be stored as dense. Matrices stored this way may only be
1593    read into matrices of the same type.
1594 */
1595 #define MATRIX_BINARY_FORMAT_DENSE -1
1596 
1597 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1598 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1599 
1600 /*S
1601      MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace
1603 
1604    Level: advanced
1605 
1606   Concepts: matrix; linear operator, null space
1607 
1608   Users manual sections:
1609 .   sec_singular
1610 
1611 .seealso:  MatNullSpaceCreate()
1612 S*/
1613 typedef struct _p_MatNullSpace* MatNullSpace;
1614 
1615 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1616 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1617 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1618 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1619 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1620 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1621 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1622 
1623 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1624 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1625 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1626 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1627 
1628 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1629 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1630 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1631 
1632 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1633 
1634 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1635 
1636 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1637 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1638 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1639 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1640 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1641 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1642 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1643 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1644 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1645 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1646 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1647 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1648 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1649 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1650 
1651 /*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
1653               Jacobian vector products
1654 
1655     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1656 
1657            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1658 
1659     Level: developer
1660 
.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1662 S*/
1663 typedef struct _p_MatMFFD* MatMFFD;
1664 
1665 /*E
1666     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1667 
1668    Level: beginner
1669 
1670 .seealso: MatMFFDSetType(), MatMFFDRegister()
1671 E*/
1672 #define MatMFFDType char*
1673 #define MATMFFD_DS  "ds"
1674 #define MATMFFD_WP  "wp"
1675 
1676 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1677 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1678 
1679 /*MC
1680    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1681 
1682    Synopsis:
1683    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1684 
1685    Not Collective
1686 
1687    Input Parameters:
1688 +  name_solver - name of a new user-defined compute-h module
1689 .  path - path (either absolute or relative) the library containing this solver
1690 .  name_create - name of routine to create method context
1691 -  routine_create - routine to create method context
1692 
1693    Level: developer
1694 
1695    Notes:
1696    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1697 
1698    If dynamic libraries are used, then the fourth input argument (routine_create)
1699    is ignored.
1700 
1701    Sample usage:
1702 .vb
1703    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1704                "MyHCreate",MyHCreate);
1705 .ve
1706 
1707    Then, your solver can be chosen with the procedural interface via
1708 $     MatMFFDSetType(mfctx,"my_h")
1709    or at runtime via the option
1710 $     -snes_mf_type my_h
1711 
1712 .keywords: MatMFFD, register
1713 
1714 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1715 M*/
1716 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1717 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
1718 #else
1719 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
1720 #endif
1721 
1722 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1723 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1724 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1725 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1726 
1727 
1728 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1729 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1730 
1731 /*
1732    PETSc interface to MUMPS
1733 */
1734 #ifdef PETSC_HAVE_MUMPS
1735 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1736 #endif
1737 
1738 /*
1739    PETSc interface to SUPERLU
1740 */
1741 #ifdef PETSC_HAVE_SUPERLU
1742 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1743 #endif
1744 
1745 #if defined(PETSC_HAVE_CUSP)
1746 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1747 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1748 #endif
1749 
1750 /*
1751    PETSc interface to FFTW
1752 */
1753 #if defined(PETSC_HAVE_FFTW)
1754 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1755 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1756 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1757 #endif
1758 
1759 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1760 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1761 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1762 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1763 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1764 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1765 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1766 
1767 PETSC_EXTERN_CXX_END
1768 #endif
1769