xref: /petsc/include/petscmat.h (revision 85afcc9ae9ea289cfdbcd5f2fb7e605e311ecd9d)
1 /*
2      Include file for the matrix component of PETSc
3 */
4 #ifndef __PETSCMAT_H
5 #define __PETSCMAT_H
6 #include "petscvec.h"
7 PETSC_EXTERN_CXX_BEGIN
8 
9 /*S
10      Mat - Abstract PETSc matrix object
11 
12    Level: beginner
13 
14   Concepts: matrix; linear operator
15 
16 .seealso:  MatCreate(), MatType, MatSetType()
17 S*/
18 typedef struct _p_Mat*           Mat;
19 
20 /*E
21     MatType - String with the name of a PETSc matrix or the creation function
22        with an optional dynamic library name, for example
23        http://www.mcs.anl.gov/petsc/lib.a:mymatcreate()
24 
25    Level: beginner
26 
27 .seealso: MatSetType(), Mat, MatSolverPackage
28 E*/
29 #define MatType char*
30 #define MATSAME            "same"
31 #define MATMAIJ            "maij"
32 #define MATSEQMAIJ         "seqmaij"
33 #define MATMPIMAIJ         "mpimaij"
34 #define MATIS              "is"
35 #define MATAIJ             "aij"
36 #define MATSEQAIJ          "seqaij"
37 #define MATSEQPTHREADAIJ     "seqpthreadaij"
38 #define MATMPIAIJ          "mpiaij"
39 #define MATAIJCRL          "aijcrl"
40 #define MATSEQAIJCRL       "seqaijcrl"
41 #define MATMPIAIJCRL       "mpiaijcrl"
42 #define MATAIJCUSP         "aijcusp"
43 #define MATSEQAIJCUSP      "seqaijcusp"
44 #define MATMPIAIJCUSP      "mpiaijcusp"
45 #define MATAIJPERM         "aijperm"
46 #define MATSEQAIJPERM      "seqaijperm"
47 #define MATMPIAIJPERM      "mpiaijperm"
48 #define MATSHELL           "shell"
49 #define MATDENSE           "dense"
50 #define MATSEQDENSE        "seqdense"
51 #define MATMPIDENSE        "mpidense"
52 #define MATBAIJ            "baij"
53 #define MATSEQBAIJ         "seqbaij"
54 #define MATMPIBAIJ         "mpibaij"
55 #define MATMPIADJ          "mpiadj"
56 #define MATSBAIJ           "sbaij"
57 #define MATSEQSBAIJ        "seqsbaij"
58 #define MATMPISBAIJ        "mpisbaij"
59 
60 #define MATSEQBSTRM        "seqbstrm"
61 #define MATMPIBSTRM        "mpibstrm"
62 #define MATBSTRM           "bstrm"
63 #define MATSEQSBSTRM       "seqsbstrm"
64 #define MATMPISBSTRM       "mpisbstrm"
65 #define MATSBSTRM          "sbstrm"
66 
67 #define MATDAAD            "daad"
68 #define MATMFFD            "mffd"
69 #define MATNORMAL          "normal"
70 #define MATLRC             "lrc"
71 #define MATSCATTER         "scatter"
72 #define MATBLOCKMAT        "blockmat"
73 #define MATCOMPOSITE       "composite"
74 #define MATFFT             "fft"
75 #define MATFFTW            "fftw"
76 #define MATSEQCUFFT        "seqcufft"
77 #define MATTRANSPOSEMAT    "transpose"
78 #define MATSCHURCOMPLEMENT "schurcomplement"
79 #define MATPYTHON          "python"
80 #define MATHYPRESTRUCT     "hyprestruct"
81 #define MATHYPRESSTRUCT    "hypresstruct"
82 #define MATSUBMATRIX       "submatrix"
83 #define MATLOCALREF        "localref"
84 #define MATNEST            "nest"
85 
86 /*E
87     MatSolverPackage - String with the name of a PETSc matrix solver type.
88 
89     For example: "petsc" indicates what PETSc provides, "superlu" indicates either
90        SuperLU or SuperLU_Dist etc.
91 
92 
93    Level: beginner
94 
95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType
96 E*/
97 #define MatSolverPackage char*
98 #define MATSOLVERSPOOLES      "spooles"
99 #define MATSOLVERSUPERLU      "superlu"
100 #define MATSOLVERSUPERLU_DIST "superlu_dist"
101 #define MATSOLVERUMFPACK      "umfpack"
102 #define MATSOLVERCHOLMOD      "cholmod"
103 #define MATSOLVERESSL         "essl"
104 #define MATSOLVERLUSOL        "lusol"
105 #define MATSOLVERMUMPS        "mumps"
106 #define MATSOLVERPASTIX       "pastix"
107 #define MATSOLVERDSCPACK      "dscpack"
108 #define MATSOLVERMATLAB       "matlab"
109 #define MATSOLVERPETSC        "petsc"
110 #define MATSOLVERPLAPACK      "plapack"
111 #define MATSOLVERBAS          "bas"
112 
113 #define MATSOLVERBSTRM        "bstrm"
114 #define MATSOLVERSBSTRM       "sbstrm"
115 
116 /*E
117     MatFactorType - indicates what type of factorization is requested
118 
119     Level: beginner
120 
121    Any additions/changes here MUST also be made in include/finclude/petscmat.h
122 
123 .seealso: MatSolverPackage, MatGetFactor()
124 E*/
125 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType;
126 extern const char *const MatFactorTypes[];
127 
128 extern PetscErrorCode  MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*);
129 extern PetscErrorCode  MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *);
130 extern PetscErrorCode  MatFactorGetSolverPackage(Mat,const MatSolverPackage*);
131 extern PetscErrorCode  MatGetFactorType(Mat,MatFactorType*);
132 
133 /* Logging support */
134 #define    MAT_FILE_CLASSID 1211216    /* used to indicate matrices in binary files */
135 extern PetscClassId  MAT_CLASSID;
136 extern PetscClassId  MAT_FDCOLORING_CLASSID;
137 extern PetscClassId  MAT_PARTITIONING_CLASSID;
138 extern PetscClassId  MAT_NULLSPACE_CLASSID;
139 extern PetscClassId  MATMFFD_CLASSID;
140 
141 /*E
142     MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices()
143      or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate
144      that the input matrix is to be replaced with the converted matrix.
145 
146     Level: beginner
147 
148    Any additions/changes here MUST also be made in include/finclude/petscmat.h
149 
150 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert()
151 E*/
152 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse;
153 
154 /*E
155     MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices()
156      include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure().
157 
158     Level: beginner
159 
160 .seealso: MatGetSeqNonzeroStructure()
161 E*/
162 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption;
163 
164 extern PetscErrorCode  MatInitializePackage(const char[]);
165 
166 extern PetscErrorCode  MatCreate(MPI_Comm,Mat*);
167 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A)
168 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A)
169 extern PetscErrorCode  MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt);
170 extern PetscErrorCode  MatSetType(Mat,const MatType);
171 extern PetscErrorCode  MatSetFromOptions(Mat);
172 extern PetscErrorCode  MatSetUpPreallocation(Mat);
173 extern PetscErrorCode  MatRegisterAll(const char[]);
174 extern PetscErrorCode  MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat));
175 extern PetscErrorCode  MatRegisterBaseName(const char[],const char[],const char[]);
176 extern PetscErrorCode  MatSetOptionsPrefix(Mat,const char[]);
177 extern PetscErrorCode  MatAppendOptionsPrefix(Mat,const char[]);
178 extern PetscErrorCode  MatGetOptionsPrefix(Mat,const char*[]);
179 
180 /*MC
181    MatRegisterDynamic - Adds a new matrix type
182 
183    Synopsis:
184    PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat))
185 
186    Not Collective
187 
188    Input Parameters:
189 +  name - name of a new user-defined matrix type
190 .  path - path (either absolute or relative) the library containing this solver
191 .  name_create - name of routine to create method context
192 -  routine_create - routine to create method context
193 
194    Notes:
195    MatRegisterDynamic() may be called multiple times to add several user-defined solvers.
196 
197    If dynamic libraries are used, then the fourth input argument (routine_create)
198    is ignored.
199 
200    Sample usage:
201 .vb
202    MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a,
203                "MyMatCreate",MyMatCreate);
204 .ve
205 
206    Then, your solver can be chosen with the procedural interface via
207 $     MatSetType(Mat,"my_mat")
208    or at runtime via the option
209 $     -mat_type my_mat
210 
211    Level: advanced
212 
213    Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
214          If your function is not being put into a shared library then use MatRegister() instead
215 
216 .keywords: Mat, register
217 
218 .seealso: MatRegisterAll(), MatRegisterDestroy()
219 
220 M*/
221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) /* dynamic libraries: the creation routine is located at runtime by name (c) in library (b), so the function pointer (d) is ignored -- see the MC doc above */
223 #else
224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) /* static build: register the function pointer directly */
225 #endif
226 
227 extern PetscBool  MatRegisterAllCalled;
228 extern PetscFList MatList;
229 extern PetscFList MatColoringList;
230 extern PetscFList MatPartitioningList;
231 
232 /*E
233     MatStructure - Indicates whether two matrices have the same nonzero structure
234 
235     Level: beginner
236 
237    Any additions/changes here MUST also be made in include/finclude/petscmat.h
238 
239 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators()
240 E*/
241 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure;
242 
243 extern PetscErrorCode  MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*);
244 extern PetscErrorCode  MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*);
245 extern PetscErrorCode  MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A)
247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A)
248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
249 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
252 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
253 extern PetscErrorCode  MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
257 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
260 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
264 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
267 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
268 extern PetscErrorCode  MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
269 extern PetscErrorCode  MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);
270 
271 extern PetscErrorCode  MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
275 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
278 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
279 extern PetscErrorCode  MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
283 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
286 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
290 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
293 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
294 extern PetscErrorCode  MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);
295 
296 extern PetscErrorCode  MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
297 extern PetscErrorCode  MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
301 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
304 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
305 
306 extern PetscErrorCode  MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
310 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
313 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
317 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
320 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
321 extern PetscErrorCode  MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
322 extern PetscErrorCode  MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
323 
324 extern PetscErrorCode  MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
325 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
326 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
327 extern PetscErrorCode  MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
328 extern PetscErrorCode  MatCreateNormal(Mat,Mat*);
329 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
330 extern PetscErrorCode  MatCreateLRC(Mat,Mat,Mat,Mat*);
331 extern PetscErrorCode  MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
332 extern PetscErrorCode  MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
333 extern PetscErrorCode  MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
334 
335 extern PetscErrorCode  MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
336 extern PetscErrorCode  MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
337 extern PetscErrorCode  MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
338 extern PetscErrorCode  MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
339 
340 extern PetscErrorCode  MatCreateScatter(MPI_Comm,VecScatter,Mat*);
341 extern PetscErrorCode  MatScatterSetVecScatter(Mat,VecScatter);
342 extern PetscErrorCode  MatScatterGetVecScatter(Mat,VecScatter*);
343 extern PetscErrorCode  MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
344 extern PetscErrorCode  MatCompositeAddMat(Mat,Mat);
345 extern PetscErrorCode  MatCompositeMerge(Mat);
346 extern PetscErrorCode  MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
347 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
348 extern PetscErrorCode  MatCompositeSetType(Mat,MatCompositeType);
349 
350 extern PetscErrorCode  MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
351 extern PetscErrorCode  MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);
352 
353 extern PetscErrorCode  MatCreateTranspose(Mat,Mat*);
354 extern PetscErrorCode  MatCreateSubMatrix(Mat,IS,IS,Mat*);
355 extern PetscErrorCode  MatSubMatrixUpdate(Mat,Mat,IS,IS);
356 extern PetscErrorCode  MatCreateLocalRef(Mat,IS,IS,Mat*);
357 
358 extern PetscErrorCode  MatPythonSetType(Mat,const char[]);
359 
360 extern PetscErrorCode  MatSetUp(Mat);
361 extern PetscErrorCode  MatDestroy(Mat*);
362 
363 extern PetscErrorCode  MatConjugate(Mat);
364 extern PetscErrorCode  MatRealPart(Mat);
365 extern PetscErrorCode  MatImaginaryPart(Mat);
366 extern PetscErrorCode  MatGetDiagonalBlock(Mat,Mat*);
367 extern PetscErrorCode  MatGetTrace(Mat,PetscScalar*);
368 extern PetscErrorCode  MatInvertBlockDiagonal(Mat,PetscScalar **);
369 
370 /* ------------------------------------------------------------*/
371 extern PetscErrorCode  MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
372 extern PetscErrorCode  MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
373 extern PetscErrorCode  MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
374 extern PetscErrorCode  MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
375 
376 /*S
377      MatStencil - Data structure (C struct) for storing information about a single row or
378         column of a matrix as an index on an associated grid.
379 
380    Level: beginner
381 
382   Concepts: matrix; linear operator
383 
384 .seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil()
385 S*/
386 typedef struct {
387   PetscInt k,j,i,c;  /* k,j,i: structured-grid indices of the row/column; c: degree-of-freedom component at that grid point. NOTE(review): axis order (k slowest .. i fastest) is conventional -- confirm against MatSetValuesStencil() */
388 } MatStencil;
389 
390 extern PetscErrorCode  MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
391 extern PetscErrorCode  MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
392 extern PetscErrorCode  MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);
393 
394 extern PetscErrorCode  MatSetColoring(Mat,ISColoring);
395 extern PetscErrorCode  MatSetValuesAdic(Mat,void*);
396 extern PetscErrorCode  MatSetValuesAdifor(Mat,PetscInt,void*);
397 
398 /*E
399     MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
400      to continue to add values to it
401 
402     Level: beginner
403 
404 .seealso: MatAssemblyBegin(), MatAssemblyEnd()
405 E*/
406 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; /* note the explicit values (FINAL is 0, FLUSH is 1); presumably mirrored in the Fortran include files -- do not change them */
407 extern PetscErrorCode  MatAssemblyBegin(Mat,MatAssemblyType);
408 extern PetscErrorCode  MatAssemblyEnd(Mat,MatAssemblyType);
409 extern PetscErrorCode  MatAssembled(Mat,PetscBool *);
410 
411 
412 
413 /*E
414     MatOption - Options that may be set for a matrix and its behavior or storage
415 
416     Level: beginner
417 
418    Any additions/changes here MUST also be made in include/finclude/petscmat.h
419 
420 .seealso: MatSetOption()
421 E*/
422 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
423               MAT_SYMMETRIC,
424               MAT_STRUCTURALLY_SYMMETRIC,
425               MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
426               MAT_NEW_NONZERO_LOCATION_ERR,
427               MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
428               MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
429               MAT_USE_INODES,
430               MAT_HERMITIAN,
431               MAT_SYMMETRY_ETERNAL,
432               MAT_CHECK_COMPRESSED_ROW,
433               MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
434               MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
435               MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
436               NUM_MAT_OPTIONS} MatOption; /* sentinel: no explicit values above, so keeping this last makes it equal the number of options (presumably the length of MatOptions[] below -- verify) */
437 extern const char *MatOptions[];
438 extern PetscErrorCode  MatSetOption(Mat,MatOption,PetscBool );
439 extern PetscErrorCode  MatGetType(Mat,const MatType*);
440 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)
441 
442 extern PetscErrorCode  MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
443 extern PetscErrorCode  MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
444 extern PetscErrorCode  MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
445 extern PetscErrorCode  MatGetRowUpperTriangular(Mat);
446 extern PetscErrorCode  MatRestoreRowUpperTriangular(Mat);
447 extern PetscErrorCode  MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
448 extern PetscErrorCode  MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
449 extern PetscErrorCode  MatGetColumnVector(Mat,Vec,PetscInt);
450 extern PetscErrorCode  MatGetArray(Mat,PetscScalar *[]);
451 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
452 extern PetscErrorCode  MatRestoreArray(Mat,PetscScalar *[]);
453 extern PetscErrorCode  MatGetBlockSize(Mat,PetscInt *);
454 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
455 extern PetscErrorCode  MatSetBlockSize(Mat,PetscInt);
456 
457 
458 extern PetscErrorCode  MatMult(Mat,Vec,Vec);
459 extern PetscErrorCode  MatMultDiagonalBlock(Mat,Vec,Vec);
460 extern PetscErrorCode  MatMultAdd(Mat,Vec,Vec,Vec);
461 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
462 extern PetscErrorCode  MatMultTranspose(Mat,Vec,Vec);
463 extern PetscErrorCode  MatMultHermitianTranspose(Mat,Vec,Vec);
464 extern PetscErrorCode  MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
465 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
466 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
467 extern PetscErrorCode  MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
468 extern PetscErrorCode  MatMultTransposeAdd(Mat,Vec,Vec,Vec);
469 extern PetscErrorCode  MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
470 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
471 extern PetscErrorCode  MatMultConstrained(Mat,Vec,Vec);
472 extern PetscErrorCode  MatMultTransposeConstrained(Mat,Vec,Vec);
473 extern PetscErrorCode  MatMatSolve(Mat,Mat,Mat);
474 
475 /*E
476     MatDuplicateOption - Indicates if a duplicated sparse matrix should have
477   its numerical values copied over or just its nonzero structure.
478 
479     Level: beginner
480 
481    Any additions/changes here MUST also be made in include/finclude/petscmat.h
482 
483 $   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
484 $                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
485 $                               have several matrices with the same nonzero pattern.
486 
487 .seealso: MatDuplicate()
488 E*/
489 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;
490 
491 extern PetscErrorCode  MatConvert(Mat,const MatType,MatReuse,Mat*);
492 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
493 extern PetscErrorCode  MatDuplicate(Mat,MatDuplicateOption,Mat*);
494 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
495 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)
496 
497 
498 extern PetscErrorCode  MatCopy(Mat,Mat,MatStructure);
499 extern PetscErrorCode  MatView(Mat,PetscViewer);
500 extern PetscErrorCode  MatIsSymmetric(Mat,PetscReal,PetscBool *);
501 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
502 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
503 extern PetscErrorCode  MatIsStructurallySymmetric(Mat,PetscBool *);
504 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
505 extern PetscErrorCode  MatIsHermitian(Mat,PetscReal,PetscBool *);
506 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
507 extern PetscErrorCode  MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
508 extern PetscErrorCode  MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
509 extern PetscErrorCode  MatMissingDiagonal(Mat,PetscBool  *,PetscInt *);
510 extern PetscErrorCode  MatLoad(Mat, PetscViewer);
511 
512 extern PetscErrorCode  MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
513 extern PetscErrorCode  MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
514 extern PetscErrorCode  MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool  *);
515 extern PetscErrorCode  MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool  *);
516 
517 /*S
518      MatInfo - Context of matrix information, used with MatGetInfo()
519 
520    In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE
521 
522    Level: intermediate
523 
524   Concepts: matrix^nonzero information
525 
526 .seealso:  MatGetInfo(), MatInfoType
527 S*/
528 typedef struct {
529   PetscLogDouble block_size;                         /* block size of the matrix */
530   PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros: allocated, actually used, and allocated but not needed */
531   PetscLogDouble memory;                             /* memory allocated for the matrix */
532   PetscLogDouble assemblies;                         /* number of matrix assemblies performed */
533   PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
534   PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio given to and needed by LU/ILU factorization */
535   PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
536 } MatInfo;
537 
538 /*E
539     MatInfoType - Indicates if you want information about the local part of the matrix,
540      the entire parallel matrix or the maximum over all the local parts.
541 
542     Level: beginner
543 
544    Any additions/changes here MUST also be made in include/finclude/petscmat.h
545 
546 .seealso: MatGetInfo(), MatInfo
547 E*/
548 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; /* scope of the statistics returned by MatGetInfo() */
549 extern PetscErrorCode  MatGetInfo(Mat,MatInfoType,MatInfo*);
550 extern PetscErrorCode  MatGetDiagonal(Mat,Vec);
551 extern PetscErrorCode  MatGetRowMax(Mat,Vec,PetscInt[]);
552 extern PetscErrorCode  MatGetRowMin(Mat,Vec,PetscInt[]);
553 extern PetscErrorCode  MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
554 extern PetscErrorCode  MatGetRowMinAbs(Mat,Vec,PetscInt[]);
555 extern PetscErrorCode  MatGetRowSum(Mat,Vec);
556 extern PetscErrorCode  MatTranspose(Mat,MatReuse,Mat*);
557 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
558 extern PetscErrorCode  MatHermitianTranspose(Mat,MatReuse,Mat*);
559 extern PetscErrorCode  MatPermute(Mat,IS,IS,Mat *);
560 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
561 extern PetscErrorCode  MatDiagonalScale(Mat,Vec,Vec);
562 extern PetscErrorCode  MatDiagonalSet(Mat,Vec,InsertMode);
563 extern PetscErrorCode  MatEqual(Mat,Mat,PetscBool *);
564 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
565 extern PetscErrorCode  MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
566 extern PetscErrorCode  MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
567 extern PetscErrorCode  MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
568 extern PetscErrorCode  MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);
569 
570 extern PetscErrorCode  MatNorm(Mat,NormType,PetscReal *);
571 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
572 extern PetscErrorCode  MatGetColumnNorms(Mat,NormType,PetscReal *);
573 extern PetscErrorCode  MatZeroEntries(Mat);
574 extern PetscErrorCode  MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
575 extern PetscErrorCode  MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
576 extern PetscErrorCode  MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
577 extern PetscErrorCode  MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
578 extern PetscErrorCode  MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);
579 
580 extern PetscErrorCode  MatUseScaledForm(Mat,PetscBool );
581 extern PetscErrorCode  MatScaleSystem(Mat,Vec,Vec);
582 extern PetscErrorCode  MatUnScaleSystem(Mat,Vec,Vec);
583 
584 extern PetscErrorCode  MatGetSize(Mat,PetscInt*,PetscInt*);
585 extern PetscErrorCode  MatGetLocalSize(Mat,PetscInt*,PetscInt*);
586 extern PetscErrorCode  MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
587 extern PetscErrorCode  MatGetOwnershipRanges(Mat,const PetscInt**);
588 extern PetscErrorCode  MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
589 extern PetscErrorCode  MatGetOwnershipRangesColumn(Mat,const PetscInt**);
590 
591 extern PetscErrorCode  MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
592 extern PetscErrorCode  MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
593 extern PetscErrorCode  MatDestroyMatrices(PetscInt,Mat *[]);
594 extern PetscErrorCode  MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
595 extern PetscErrorCode  MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
596 extern PetscErrorCode  MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
597 extern PetscErrorCode  MatGetSeqNonzeroStructure(Mat,Mat*);
598 extern PetscErrorCode  MatDestroySeqNonzeroStructure(Mat*);
599 
600 extern PetscErrorCode  MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
601 extern PetscErrorCode  MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
602 extern PetscErrorCode  MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
603 extern PetscErrorCode  MatMerge_SeqsToMPINumeric(Mat,Mat);
604 extern PetscErrorCode  MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
605 extern PetscErrorCode  MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
606 extern PetscErrorCode  MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
607 extern PetscErrorCode  MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
608 #if defined (PETSC_USE_CTABLE)
609 #include "petscctable.h"
610 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
611 #else
612 extern PetscErrorCode  MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
613 #endif
614 extern PetscErrorCode  MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);
615 
616 extern PetscErrorCode  MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);
617 
618 extern PetscErrorCode  MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
619 extern PetscErrorCode  MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
620 extern PetscErrorCode  MatMatMultNumeric(Mat,Mat,Mat);
621 
622 extern PetscErrorCode  MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
623 extern PetscErrorCode  MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
624 extern PetscErrorCode  MatPtAPNumeric(Mat,Mat,Mat);
625 
626 extern PetscErrorCode  MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
627 extern PetscErrorCode  MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
628 extern PetscErrorCode  MatMatMultTransposeNumeric(Mat,Mat,Mat);
629 
630 extern PetscErrorCode  MatAXPY(Mat,PetscScalar,Mat,MatStructure);
631 extern PetscErrorCode  MatAYPX(Mat,PetscScalar,Mat,MatStructure);
632 
633 extern PetscErrorCode  MatScale(Mat,PetscScalar);
634 extern PetscErrorCode  MatShift(Mat,PetscScalar);
635 
636 extern PetscErrorCode  MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
637 extern PetscErrorCode  MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
638 extern PetscErrorCode  MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
639 extern PetscErrorCode  MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
640 extern PetscErrorCode  MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
641 extern PetscErrorCode  MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
642 extern PetscErrorCode  MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
643 extern PetscErrorCode  MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
644 extern PetscErrorCode  MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
645 extern PetscErrorCode  MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
646 
647 extern PetscErrorCode  MatStashSetInitialSize(Mat,PetscInt,PetscInt);
648 extern PetscErrorCode  MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);
649 
650 extern PetscErrorCode  MatInterpolate(Mat,Vec,Vec);
651 extern PetscErrorCode  MatInterpolateAdd(Mat,Vec,Vec,Vec);
652 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
653 extern PetscErrorCode  MatRestrict(Mat,Vec,Vec);
654 extern PetscErrorCode  MatGetVecs(Mat,Vec*,Vec*);
655 extern PetscErrorCode  MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
656 extern PetscErrorCode  MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
657 extern PetscErrorCode  MatFindZeroDiagonals(Mat,IS*);
658 
659 /*MC
660    MatSetValue - Set a single entry into a matrix.
661 
662    Not collective
663 
664    Input Parameters:
665 +  m - the matrix
666 .  row - the row location of the entry
667 .  col - the column location of the entry
668 .  value - the value to insert
669 -  mode - either INSERT_VALUES or ADD_VALUES
670 
671    Notes:
672    For efficiency one should use MatSetValues() and set several or many
673    values simultaneously if possible.
674 
675    Level: beginner
676 
677 .seealso: MatSetValues(), MatSetValueLocal()
678 M*/
679 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode)
{
  /* Single-entry convenience form of MatSetValues(): one row index, one column index */
  return MatSetValues(v,1,&i,1,&j,&va,mode);
}
680 
681 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va)
{
  /* Single-entry convenience form of MatGetValues(): fetches entry (i,j) into *va */
  return MatGetValues(v,1,&i,1,&j,va);
}
682 
683 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode)
{
  /* Single-entry convenience form of MatSetValuesLocal(), indices in local numbering */
  return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);
}
684 
685 extern PetscErrorCode MatSeqAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
686 extern PetscErrorCode MatMPIAIJSetValuesBatch(Mat, PetscInt, PetscInt, PetscInt *, PetscScalar *);
687 
688 /*MC
689    MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
690        row in a matrix providing the data that one can use to correctly preallocate the matrix.
691 
692    Synopsis:
693    PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
694 
695    Collective on MPI_Comm
696 
697    Input Parameters:
698 +  comm - the communicator that will share the eventually allocated matrix
699 .  nrows - the number of LOCAL rows in the matrix
700 -  ncols - the number of LOCAL columns in the matrix
701 
702    Output Parameters:
703 +  dnz - the array that will be passed to the matrix preallocation routines
704 -  onz - the other array passed to the matrix preallocation routines
705 
706 
707    Level: intermediate
708 
709    Notes:
710     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
711 
712    Do not malloc or free dnz and onz, that is handled internally by these routines
713 
714    Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)
715 
716    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
717 
718   Concepts: preallocation^Matrix
719 
720 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
721           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
722 M*/
/*
   Implementation notes: allocates dnz[] and onz[] (length nrows, zeroed) -- freed later by
   MatPreallocateFinalize() -- and uses MPI_Scan() to compute this process's first global
   column (__start), one-past-last column (__end), and first global row (__rstart).
   The opening { below is closed by MatPreallocateFinalize().
*/
723 #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
724 { \
725   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
726   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
727   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
728   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
729   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
730   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
731 
732 /*MC
733    MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
734        row in a matrix providing the data that one can use to correctly preallocate the matrix.
735 
736    Synopsis:
737    PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)
738 
739    Collective on MPI_Comm
740 
741    Input Parameters:
742 +  comm - the communicator that will share the eventually allocated matrix
743 .  nrows - the number of LOCAL rows in the matrix
744 -  ncols - the number of LOCAL columns in the matrix
745 
746    Output Parameters:
747 +  dnz - the array that will be passed to the matrix preallocation routines
748 -  onz - the other array passed to the matrix preallocation routines
749 
750 
751    Level: intermediate
752 
753    Notes:
754     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
755 
756    Do not malloc or free dnz and onz, that is handled internally by these routines
757 
758    This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().
759 
760   Concepts: preallocation^Matrix
761 
762 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
763           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
764 M*/
/*
   Implementation notes: same as MatPreallocateInitialize() except __start is not computed,
   since MatPreallocateSymmetricSet() only tests columns against __end. The opening { below
   is closed by MatPreallocateFinalize().
*/
765 #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
766 { \
767   PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
768   _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
769   _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
770   _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
771   _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
772   _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;
773 
774 /*MC
775    MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
776        inserted using a local number of the rows and columns
777 
778    Synopsis:
779    PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
780 
781    Not Collective
782 
783    Input Parameters:
784 +  rmap - the row mapping from local numbering to global numbering
785 .  nrows - the number of rows indicated
786 .  rows - the indices of the rows
787 .  cmap - the column mapping from local to global numbering
788 .  ncols - the number of columns in the matrix
789 .  cols - the columns indicated
790 .  dnz - the array that will be passed to the matrix preallocation routines
791 -  onz - the other array passed to the matrix preallocation routines
792 
793 
794    Level: intermediate
795 
796    Notes:
797     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
798 
799    Do not malloc or free dnz and onz, that is handled internally by these routines
800 
801   Concepts: preallocation^Matrix
802 
803 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
804           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
805 M*/
/*
   Implementation notes: translates rows[] and cols[] from local to global numbering IN PLACE
   (the caller's arrays are overwritten), then counts each row with MatPreallocateSet().
   Relies on _4_ierr and the __-prefixed variables declared by MatPreallocateInitialize().
*/
806 #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
807 {\
808   PetscInt __l;\
809   _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
810   _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
811   for (__l=0;__l<nrows;__l++) {\
812     _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
813   }\
814 }
815 
816 /*MC
817    MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
818        inserted using a local number of the rows and columns
819 
820    Synopsis:
821    PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)
822 
823    Not Collective
824 
825    Input Parameters:
826 +  map - the mapping between local numbering and global numbering
827 .  nrows - the number of rows indicated
828 .  rows - the indices of the rows
829 .  ncols - the number of columns in the matrix
830 .  cols - the columns indicated
831 .  dnz - the array that will be passed to the matrix preallocation routines
832 -  onz - the other array passed to the matrix preallocation routines
833 
834 
835    Level: intermediate
836 
837    Notes:
838     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
839 
840    Do not malloc or free dnz and onz that is handled internally by these routines
841 
842   Concepts: preallocation^Matrix
843 
844 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
845           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
846 M*/
/*
   Implementation notes: like MatPreallocateSetLocal() but a single map translates both
   rows[] and cols[] (IN PLACE, overwriting the caller's arrays), and counting is done
   with MatPreallocateSymmetricSet(). Relies on _4_ierr and the __-prefixed variables
   declared by MatPreallocateSymmetricInitialize().
*/
847 #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
848 {\
849   PetscInt __l;\
850   _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
851   _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
852   for (__l=0;__l<nrows;__l++) {\
853     _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
854   }\
855 }
856 
857 /*MC
858    MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
859        inserted using a local number of the rows and columns
860 
861    Synopsis:
862    PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
863 
864    Not Collective
865 
866    Input Parameters:
867 +  row - the row
868 .  ncols - the number of columns in the matrix
869 -  cols - the columns indicated
870 
871    Output Parameters:
872 +  dnz - the array that will be passed to the matrix preallocation routines
873 -  onz - the other array passed to the matrix preallocation routines
874 
875 
876    Level: intermediate
877 
878    Notes:
879     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
880 
881    Do not malloc or free dnz and onz that is handled internally by these routines
882 
883    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
884 
885   Concepts: preallocation^Matrix
886 
887 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
888           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
889 M*/
/*
   Implementation notes: for each column, increments dnz[] when the column lies in this
   process's diagonal block [__start,__end), otherwise onz[]. Errors if row is outside the
   local row range [__rstart,__rstart+__nrows). Uses variables declared by
   MatPreallocateInitialize().
*/
890 #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
891 { PetscInt __i; \
892   if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
893   if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
894   for (__i=0; __i<nc; __i++) {\
895     if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
896     else dnz[row - __rstart]++;\
897   }\
898 }
899 
900 /*MC
901    MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
902        inserted using a local number of the rows and columns
903 
904    Synopsis:
905    PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
906 
907    Not Collective
908 
909    Input Parameters:
910 +  row - the row
911 .  ncols - the number of columns in the matrix
912 .  cols - the columns indicated
913 .  dnz - the array that will be passed to the matrix preallocation routines
914 -  onz - the other array passed to the matrix preallocation routines
915 
916 
917    Level: intermediate
918 
919    Notes:
920     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
922 
923    Do not malloc or free dnz and onz that is handled internally by these routines
924 
925    This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().
926 
927   Concepts: preallocation^Matrix
928 
929 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
930           MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
931 M*/
/*
   Implementation notes: columns at or beyond __end count into onz[]; otherwise only
   upper-triangular columns (cols[__i] >= row) count into dnz[]. Remaining (on-process,
   below-diagonal) columns are skipped -- presumably because the symmetric (SBAIJ) formats
   store only the upper triangle. Uses variables declared by
   MatPreallocateSymmetricInitialize(); note there is no row range check here, unlike
   MatPreallocateSet().
*/
932 #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
933 { PetscInt __i; \
934   for (__i=0; __i<nc; __i++) {\
935     if (cols[__i] >= __end) onz[row - __rstart]++; \
936     else if (cols[__i] >= row) dnz[row - __rstart]++;\
937   }\
938 }
939 
940 /*MC
941    MatPreallocateLocation -  An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists
942 
943    Synopsis:
944    PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)
945 
946    Not Collective
947 
948    Input Parameters:
949 +  A - matrix
950 .  row - row where values exist (must be local to this process)
951 .  ncols - number of columns
952 .  cols - columns with nonzeros
953 .  dnz - the array that will be passed to the matrix preallocation routines
954 -  onz - the other array passed to the matrix preallocation routines
955 
956 
957    Level: intermediate
958 
959    Notes:
960     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
961 
962    Do not malloc or free dnz and onz that is handled internally by these routines
963 
964    This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.
965 
966   Concepts: preallocation^Matrix
967 
968 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
969           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
970 M*/
/* NOTE: unlike the other MatPreallocate macros this uses the caller's 'ierr' variable (not
   _4_ierr), so 'ierr' must be declared in the calling scope. If A is non-NULL the locations
   are inserted into A directly; otherwise they are counted with MatPreallocateSet(). */
971 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr =  MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}
972 
973 
974 /*MC
975    MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
976        row in a matrix providing the data that one can use to correctly preallocate the matrix.
977 
978    Synopsis:
979    PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)
980 
981    Collective on MPI_Comm
982 
983    Input Parameters:
984 +  dnz - the array that was passed to the matrix preallocation routines
985 -  onz - the other array passed to the matrix preallocation routines
986 
987 
988    Level: intermediate
989 
990    Notes:
991     See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
992 
993    Do not malloc or free dnz and onz that is handled internally by these routines
994 
995    This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
996 
997   Concepts: preallocation^Matrix
998 
999 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
1000           MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
1001 M*/
/* Frees the dnz/onz arrays allocated by MatPreallocateInitialize() or
   MatPreallocateSymmetricInitialize() and supplies the closing } for the { those
   macros opened. */
1002 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}
1003 
1004 
1005 
1006 /* Routines unique to particular data structures */
1007 extern PetscErrorCode  MatShellGetContext(Mat,void *);
1008 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)
1009 
1010 extern PetscErrorCode  MatInodeAdjustForInodes(Mat,IS*,IS*);
1011 extern PetscErrorCode  MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);
1012 
1013 extern PetscErrorCode  MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
1014 extern PetscErrorCode  MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
1015 extern PetscErrorCode  MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1016 extern PetscErrorCode  MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1017 extern PetscErrorCode  MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
1018 
1019 #define MAT_SKIP_ALLOCATION -4
1020 
1021 extern PetscErrorCode  MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1022 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1023 extern PetscErrorCode  MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
1024 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
1025 extern PetscErrorCode  MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
1026 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))
1027 
1028 extern PetscErrorCode  MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1029 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
1030 extern PetscErrorCode  MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1031 extern PetscErrorCode  MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
1032 extern PetscErrorCode  MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
1033 extern PetscErrorCode  MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1034 extern PetscErrorCode  MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
1035 extern PetscErrorCode  MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
1036 extern PetscErrorCode  MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
1037 extern PetscErrorCode  MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
1038 extern PetscErrorCode  MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
1039 extern PetscErrorCode  MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1040 extern PetscErrorCode  MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
1041 extern PetscErrorCode  MatAdicSetLocalFunction(Mat,void (*)(void));
1042 
1043 extern PetscErrorCode  MatSeqDenseSetLDA(Mat,PetscInt);
1044 extern PetscErrorCode  MatDenseGetLocalMatrix(Mat,Mat*);
1045 
1046 extern PetscErrorCode  MatStoreValues(Mat);
1047 extern PetscErrorCode  MatRetrieveValues(Mat);
1048 
1049 extern PetscErrorCode  MatDAADSetCtx(Mat,void*);
1050 
1051 extern PetscErrorCode  MatFindNonzeroRows(Mat,IS*);
1052 /*
1053   These routines are not usually accessed directly, rather solving is
1054   done through the KSP and PC interfaces.
1055 */
1056 
1057 /*E
1058     MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
1059        with an optional dynamic library name, for example
1060        http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()
1061 
1062    Level: beginner
1063 
1064    Cannot use const because the PC objects manipulate the string
1065 
1066 .seealso: MatGetOrdering()
1067 E*/
1068 #define MatOrderingType char*
1069 #define MATORDERINGNATURAL     "natural"
1070 #define MATORDERINGND          "nd"
1071 #define MATORDERING1WD         "1wd"
1072 #define MATORDERINGRCM         "rcm"
1073 #define MATORDERINGQMD         "qmd"
1074 #define MATORDERINGROWLENGTH   "rowlength"
1075 #define MATORDERINGDSC_ND      "dsc_nd"         /* these three are only for DSCPACK, see its documentation for details */
1076 #define MATORDERINGDSC_MMD     "dsc_mmd"
1077 #define MATORDERINGDSC_MDF     "dsc_mdf"
1078 #define MATORDERINGAMD         "amd"            /* only works if UMFPACK is installed with PETSc */
1079 
1080 extern PetscErrorCode  MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
1081 extern PetscErrorCode  MatGetOrderingList(PetscFList *list);
1082 extern PetscErrorCode  MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));
1083 
1084 /*MC
1085    MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.
1086 
1087    Synopsis:
1088    PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))
1089 
1090    Not Collective
1091 
1092    Input Parameters:
1093 +  sname - name of ordering (for example MATORDERINGND)
1094 .  path - location of library where creation routine is
1095 .  name - name of function that creates the ordering type,a string
1096 -  function - function pointer that creates the ordering
1097 
1098    Level: developer
1099 
1100    If dynamic libraries are used, then the fourth input argument (function)
1101    is ignored.
1102 
1103    Sample usage:
1104 .vb
1105    MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
1106                "MyOrder",MyOrder);
1107 .ve
1108 
1109    Then, your partitioner can be chosen with the procedural interface via
1110 $     MatOrderingSetType(part,"my_order")
1111    or at runtime via the option
1112 $     -pc_factor_mat_ordering_type my_order
1113 
1114    ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.
1115 
1116 .keywords: matrix, ordering, register
1117 
1118 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
1119 M*/
1120 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1121 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) /* function pointer ignored; routine is loaded from the dynamic library */
1122 #else
1123 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
1124 #endif
1125 
1126 extern PetscErrorCode  MatOrderingRegisterDestroy(void);
1127 extern PetscErrorCode  MatOrderingRegisterAll(const char[]);
1128 extern PetscBool  MatOrderingRegisterAllCalled;
1129 extern PetscFList MatOrderingList;
1130 
1131 extern PetscErrorCode  MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);
1132 
1133 /*E
1134     MatFactorShiftType - Type of numeric shift used during factorization to compensate for zero or very small pivots
1135 
1136    Level: beginner
1137 
1138 E*/
1139 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; /* shift strategies to prevent zero pivots in factorization; stored (as a PetscReal) in MatFactorInfo.shifttype */
1140 extern const char *MatFactorShiftTypes[];  /* string names for the enum values -- presumably indexed by MatFactorShiftType, used for options/output */
1141 
1142 /*S
1143    MatFactorInfo - Data passed into the matrix factorization routines
1144 
1145    In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
1146 $     MatFactorInfo  info(MAT_FACTORINFO_SIZE)
1147 
1148    Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.
1149 
1150       You can use MatFactorInfoInitialize() to set default values.
1151 
1152    Level: developer
1153 
1154 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
1155           MatFactorInfoInitialize()
1156 
1157 S*/
1158 typedef struct {
1159   PetscReal     diagonal_fill;  /* force diagonal to fill in if initially not filled */
1160   PetscReal     usedt;          /* if nonzero, use the drop tolerance dt below -- presumably a boolean flag; every field is PetscReal so Fortran can treat the struct as a double precision array (see note above) */
1161   PetscReal     dt;             /* drop tolerance */
1162   PetscReal     dtcol;          /* tolerance for pivoting */
1163   PetscReal     dtcount;        /* maximum nonzeros to be allowed per row */
1164   PetscReal     fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
1165   PetscReal     levels;         /* ICC/ILU(levels) */
1166   PetscReal     pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
1167                                    factorization may be faster if do not pivot */
1168   PetscReal     zeropivot;      /* pivot is called zero if less than this */
1169   PetscReal     shifttype;      /* type of shift added to matrix factor to prevent zero pivots (a MatFactorShiftType value stored as PetscReal) */
1170   PetscReal     shiftamount;     /* how large the shift is */
1171 } MatFactorInfo;
1172 
1173 extern PetscErrorCode  MatFactorInfoInitialize(MatFactorInfo*);
1174 extern PetscErrorCode  MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
1175 extern PetscErrorCode  MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1176 extern PetscErrorCode  MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
1177 extern PetscErrorCode  MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
1178 extern PetscErrorCode  MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
1179 extern PetscErrorCode  MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1180 extern PetscErrorCode  MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
1181 extern PetscErrorCode  MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
1182 extern PetscErrorCode  MatICCFactor(Mat,IS,const MatFactorInfo*);
1183 extern PetscErrorCode  MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
1184 extern PetscErrorCode  MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
1185 extern PetscErrorCode  MatSolve(Mat,Vec,Vec);
1186 extern PetscErrorCode  MatForwardSolve(Mat,Vec,Vec);
1187 extern PetscErrorCode  MatBackwardSolve(Mat,Vec,Vec);
1188 extern PetscErrorCode  MatSolveAdd(Mat,Vec,Vec,Vec);
1189 extern PetscErrorCode  MatSolveTranspose(Mat,Vec,Vec);
1190 extern PetscErrorCode  MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
1191 extern PetscErrorCode  MatSolves(Mat,Vecs,Vecs);
1192 
1193 extern PetscErrorCode  MatSetUnfactored(Mat);
1194 
1195 /*E
1196     MatSORType - What type of (S)SOR to perform
1197 
1198     Level: beginner
1199 
1200    May be bitwise ORd together
1201 
1202    Any additions/changes here MUST also be made in include/finclude/petscmat.h
1203 
1204    MatSORType may be bitwise ORd together, so do not change the numbers
1205 
1206 .seealso: MatSOR()
1207 E*/
1208 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,   /* bit flags: SYMMETRIC = FORWARD | BACKWARD */
1209               SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
1210               SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,          /* LOCAL_SYMMETRIC = LOCAL_FORWARD | LOCAL_BACKWARD */
1211               SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; /* distinct powers of two so values may be bitwise ORd together (see note above) */
1212 extern PetscErrorCode  MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);
1213 
1214 /*
1215     These routines are for efficiently computing Jacobians via finite differences.
1216 */
1217 
1218 /*E
1219     MatColoringType - String with the name of a PETSc matrix coloring or the creation function
1220        with an optional dynamic library name, for example
1221        http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()
1222 
1223    Level: beginner
1224 
1225 .seealso: MatGetColoring()
1226 E*/
1227 #define MatColoringType char*
1228 #define MATCOLORINGNATURAL "natural"
1229 #define MATCOLORINGSL      "sl"
1230 #define MATCOLORINGLF      "lf"
1231 #define MATCOLORINGID      "id"
1232 
1233 extern PetscErrorCode  MatGetColoring(Mat,const MatColoringType,ISColoring*);
1234 extern PetscErrorCode  MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));
1235 
1236 /*MC
1237    MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
1238                                matrix package.
1239 
1240    Synopsis:
1241    PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))
1242 
1243    Not Collective
1244 
1245    Input Parameters:
1246 +  sname - name of Coloring (for example MATCOLORINGSL)
1247 .  path - location of library where creation routine is
1248 .  name - name of function that creates the Coloring type, a string
1249 -  function - function pointer that creates the coloring
1250 
1251    Level: developer
1252 
1253    If dynamic libraries are used, then the fourth input argument (function)
1254    is ignored.
1255 
1256    Sample usage:
1257 .vb
1258    MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
1259                "MyColor",MyColor);
1260 .ve
1261 
1262    Then, your coloring routine can be chosen with the procedural interface via
1263 $     MatColoringSetType(part,"my_color")
1264    or at runtime via the option
1265 $     -mat_coloring_type my_color
1266 
1267    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1268 
1269 .keywords: matrix, Coloring, register
1270 
1271 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
1272 M*/
1273 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1274 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) /* drop the function pointer: with dynamic libraries it is ignored and the routine is resolved from the path/name strings (see docs above) */
1275 #else
1276 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) /* no dynamic loading: register the function pointer directly */
1277 #endif
1278 
1279 extern PetscBool  MatColoringRegisterAllCalled;
1280 
1281 extern PetscErrorCode  MatColoringRegisterAll(const char[]);
1282 extern PetscErrorCode  MatColoringRegisterDestroy(void);
1283 extern PetscErrorCode  MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);
1284 
1285 /*S
1286      MatFDColoring - Object for computing a sparse Jacobian via finite differences
1287         and coloring
1288 
1289    Level: beginner
1290 
1291   Concepts: coloring, sparse Jacobian, finite differences
1292 
1293 .seealso:  MatFDColoringCreate()
1294 S*/
1295 typedef struct _p_MatFDColoring* MatFDColoring;
1296 
1297 extern PetscErrorCode  MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
1298 extern PetscErrorCode  MatFDColoringDestroy(MatFDColoring*);
1299 extern PetscErrorCode  MatFDColoringView(MatFDColoring,PetscViewer);
1300 extern PetscErrorCode  MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
1301 extern PetscErrorCode  MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
1302 extern PetscErrorCode  MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
1303 extern PetscErrorCode  MatFDColoringSetFromOptions(MatFDColoring);
1304 extern PetscErrorCode  MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
1305 extern PetscErrorCode  MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
1306 extern PetscErrorCode  MatFDColoringSetF(MatFDColoring,Vec);
1307 extern PetscErrorCode  MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
1308 /*
1309     These routines are for partitioning matrices: currently used only
1310   for adjacency matrix, MatCreateMPIAdj().
1311 */
1312 
1313 /*S
1314      MatPartitioning - Object for managing the partitioning of a matrix or graph
1315 
1316    Level: beginner
1317 
1318   Concepts: partitioning
1319 
1320 .seealso:  MatPartitioningCreate(), MatPartitioningType
1321 S*/
1322 typedef struct _p_MatPartitioning* MatPartitioning;
1323 
1324 /*E
1325     MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
1326        with an optional dynamic library name, for example
1327        http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()
1328 
1329    Level: beginner
1330 
1331 .seealso: MatPartitioningCreate(), MatPartitioning
1332 E*/
1333 #define MatPartitioningType char*
1334 #define MATPARTITIONINGCURRENT  "current"
1335 #define MATPARTITIONINGSQUARE   "square"
1336 #define MATPARTITIONINGPARMETIS "parmetis"
1337 #define MATPARTITIONINGCHACO    "chaco"
1338 #define MATPARTITIONINGJOSTLE   "jostle"
1339 #define MATPARTITIONINGPARTY    "party"
1340 #define MATPARTITIONINGSCOTCH   "scotch"
1341 
1342 
1343 extern PetscErrorCode  MatPartitioningCreate(MPI_Comm,MatPartitioning*);
1344 extern PetscErrorCode  MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
1345 extern PetscErrorCode  MatPartitioningSetNParts(MatPartitioning,PetscInt);
1346 extern PetscErrorCode  MatPartitioningSetAdjacency(MatPartitioning,Mat);
1347 extern PetscErrorCode  MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
1348 extern PetscErrorCode  MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
1349 extern PetscErrorCode  MatPartitioningApply(MatPartitioning,IS*);
1350 extern PetscErrorCode  MatPartitioningDestroy(MatPartitioning*);
1351 
1352 extern PetscErrorCode  MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));
1353 
1354 /*MC
1355    MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
1356    matrix package.
1357 
1358    Synopsis:
1359    PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))
1360 
1361    Not Collective
1362 
1363    Input Parameters:
1364 +  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
1365 .  path - location of library where creation routine is
1366 .  name - name of function that creates the partitioning type, a string
1367 -  function - function pointer that creates the partitioning type
1368 
1369    Level: developer
1370 
1371    If dynamic libraries are used, then the fourth input argument (function)
1372    is ignored.
1373 
1374    Sample usage:
1375 .vb
1376    MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
1377                "MyPartCreate",MyPartCreate);
1378 .ve
1379 
1380    Then, your partitioner can be chosen with the procedural interface via
1381 $     MatPartitioningSetType(part,"my_part")
1382    or at runtime via the option
1383 $     -mat_partitioning_type my_part
1384 
1385    $PETSC_ARCH occurring in pathname will be replaced with appropriate values.
1386 
1387 .keywords: matrix, partitioning, register
1388 
1389 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
1390 M*/
1391 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1392 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) /* drop the function pointer: with dynamic libraries it is ignored and the routine is resolved from the path/name strings (see docs above) */
1393 #else
1394 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) /* no dynamic loading: register the function pointer directly */
1395 #endif
1396 
1397 extern PetscBool  MatPartitioningRegisterAllCalled;
1398 
1399 extern PetscErrorCode  MatPartitioningRegisterAll(const char[]);
1400 extern PetscErrorCode  MatPartitioningRegisterDestroy(void);
1401 
1402 extern PetscErrorCode  MatPartitioningView(MatPartitioning,PetscViewer);
1403 extern PetscErrorCode  MatPartitioningSetFromOptions(MatPartitioning);
1404 extern PetscErrorCode  MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);
1405 
1406 extern PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
1407 extern PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);
1408 
1409 extern PetscErrorCode  MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal);
1410 extern PetscErrorCode  MatPartitioningJostleSetCoarseSequential(MatPartitioning);
1411 
1412 typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType; /* global partitioning methods of the Chaco package */
1413 extern PetscErrorCode  MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType);
1414 typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType; /* local refinement: Kernighan-Lin or none */
1415 extern PetscErrorCode  MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType);
1416 extern PetscErrorCode  MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
1417 typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType; /* eigensolver choice -- presumably only used by the spectral method; confirm against Chaco docs */
1418 extern PetscErrorCode  MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
1419 extern PetscErrorCode  MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal);
1420 extern PetscErrorCode  MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt);
1421 
1422 #define MP_PARTY_OPT "opt"
1423 #define MP_PARTY_LIN "lin"
1424 #define MP_PARTY_SCA "sca"
1425 #define MP_PARTY_RAN "ran"
1426 #define MP_PARTY_GBF "gbf"
1427 #define MP_PARTY_GCF "gcf"
1428 #define MP_PARTY_BUB "bub"
1429 #define MP_PARTY_DEF "def"
1430 extern PetscErrorCode  MatPartitioningPartySetGlobal(MatPartitioning, const char*);
1431 #define MP_PARTY_HELPFUL_SETS "hs"
1432 #define MP_PARTY_KERNIGHAN_LIN "kl"
1433 #define MP_PARTY_NONE "no"
1434 extern PetscErrorCode  MatPartitioningPartySetLocal(MatPartitioning, const char*);
1435 extern PetscErrorCode  MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
1436 extern PetscErrorCode  MatPartitioningPartySetBipart(MatPartitioning,PetscBool );
1437 extern PetscErrorCode  MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool );
1438 
1439 typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType;
1440 extern PetscErrorCode  MatPartitioningScotchSetArch(MatPartitioning,const char*);
1441 extern PetscErrorCode  MatPartitioningScotchSetMultilevel(MatPartitioning);
1442 extern PetscErrorCode  MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType);
1443 extern PetscErrorCode  MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal);
1444 extern PetscErrorCode  MatPartitioningScotchSetHostList(MatPartitioning,const char*);
1445 typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType;
1446 extern PetscErrorCode  MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType);
1447 extern PetscErrorCode  MatPartitioningScotchSetMapping(MatPartitioning);
1448 extern PetscErrorCode  MatPartitioningScotchSetStrategy(MatPartitioning,char*);
1449 
1450 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
1451 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);
1452 
1453 /*
1454     If you add entries here you must also add them to finclude/petscmat.h
1455 */
1456 typedef enum { MATOP_SET_VALUES=0,  /* indices into Mat's operation table; explicit values are fixed -- keep in sync with finclude/petscmat.h (see note above) */
1457                MATOP_GET_ROW=1,
1458                MATOP_RESTORE_ROW=2,
1459                MATOP_MULT=3,
1460                MATOP_MULT_ADD=4,
1461                MATOP_MULT_TRANSPOSE=5,
1462                MATOP_MULT_TRANSPOSE_ADD=6,
1463                MATOP_SOLVE=7,
1464                MATOP_SOLVE_ADD=8,
1465                MATOP_SOLVE_TRANSPOSE=9,
1466                MATOP_SOLVE_TRANSPOSE_ADD=10,
1467                MATOP_LUFACTOR=11,
1468                MATOP_CHOLESKYFACTOR=12,
1469                MATOP_SOR=13,
1470                MATOP_TRANSPOSE=14,
1471                MATOP_GETINFO=15,
1472                MATOP_EQUAL=16,
1473                MATOP_GET_DIAGONAL=17,
1474                MATOP_DIAGONAL_SCALE=18,
1475                MATOP_NORM=19,
1476                MATOP_ASSEMBLY_BEGIN=20,
1477                MATOP_ASSEMBLY_END=21,
1478                MATOP_SET_OPTION=22,
1479                MATOP_ZERO_ENTRIES=23,
1480                MATOP_ZERO_ROWS=24,
1481                MATOP_LUFACTOR_SYMBOLIC=25,
1482                MATOP_LUFACTOR_NUMERIC=26,
1483                MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
1484                MATOP_CHOLESKY_FACTOR_NUMERIC=28,
1485                MATOP_SETUP_PREALLOCATION=29,
1486                MATOP_ILUFACTOR_SYMBOLIC=30,
1487                MATOP_ICCFACTOR_SYMBOLIC=31,
1488                MATOP_GET_ARRAY=32,
1489                MATOP_RESTORE_ARRAY=33,
1490                MATOP_DUPLICATE=34,
1491                MATOP_FORWARD_SOLVE=35,
1492                MATOP_BACKWARD_SOLVE=36,
1493                MATOP_ILUFACTOR=37,
1494                MATOP_ICCFACTOR=38,
1495                MATOP_AXPY=39,
1496                MATOP_GET_SUBMATRICES=40,
1497                MATOP_INCREASE_OVERLAP=41,
1498                MATOP_GET_VALUES=42,
1499                MATOP_COPY=43,
1500                MATOP_GET_ROW_MAX=44,
1501                MATOP_SCALE=45,
1502                MATOP_SHIFT=46,
1503                MATOP_DIAGONAL_SET=47,
1504                MATOP_ILUDT_FACTOR=48,
1505                MATOP_SET_BLOCK_SIZE=49,
1506                MATOP_GET_ROW_IJ=50,
1507                MATOP_RESTORE_ROW_IJ=51,
1508                MATOP_GET_COLUMN_IJ=52,
1509                MATOP_RESTORE_COLUMN_IJ=53,
1510                MATOP_FDCOLORING_CREATE=54,
1511                MATOP_COLORING_PATCH=55,
1512                MATOP_SET_UNFACTORED=56,
1513                MATOP_PERMUTE=57,
1514                MATOP_SET_VALUES_BLOCKED=58,
1515                MATOP_GET_SUBMATRIX=59,
1516                MATOP_DESTROY=60,
1517                MATOP_VIEW=61,
1518                MATOP_CONVERT_FROM=62,
1519                MATOP_USE_SCALED_FORM=63,
1520                MATOP_SCALE_SYSTEM=64,
1521                MATOP_UNSCALE_SYSTEM=65,
1522                MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
1523                MATOP_SET_VALUES_LOCAL=67,
1524                MATOP_ZERO_ROWS_LOCAL=68,
1525                MATOP_GET_ROW_MAX_ABS=69,
1526                MATOP_GET_ROW_MIN_ABS=70,
1527                MATOP_CONVERT=71,
1528                MATOP_SET_COLORING=72,
1529                MATOP_SET_VALUES_ADIC=73,
1530                MATOP_SET_VALUES_ADIFOR=74,
1531                MATOP_FD_COLORING_APPLY=75,
1532                MATOP_SET_FROM_OPTIONS=76,
1533                MATOP_MULT_CON=77,
1534                MATOP_MULT_TRANSPOSE_CON=78,
1535                MATOP_PERMUTE_SPARSIFY=79,
1536                MATOP_MULT_MULTIPLE=80,
1537                MATOP_SOLVE_MULTIPLE=81,
1538                MATOP_GET_INERTIA=82,
1539                MATOP_LOAD=83,
1540                MATOP_IS_SYMMETRIC=84,
1541                MATOP_IS_HERMITIAN=85,
1542                MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
1543                MATOP_DUMMY=87,
1544                MATOP_GET_VECS=88,
1545                MATOP_MAT_MULT=89,
1546                MATOP_MAT_MULT_SYMBOLIC=90,
1547                MATOP_MAT_MULT_NUMERIC=91,
1548                MATOP_PTAP=92,
1549                MATOP_PTAP_SYMBOLIC=93,
1550                MATOP_PTAP_NUMERIC=94,
1551                MATOP_MAT_MULTTRANSPOSE=95,
1552                MATOP_MAT_MULTTRANSPOSE_SYM=96,
1553                MATOP_MAT_MULTTRANSPOSE_NUM=97,
1554                MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
1555                MATOP_PTAP_NUMERIC_SEQAIJ=99,
1556                MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
1557                MATOP_PTAP_NUMERIC_MPIAIJ=101,
1558                MATOP_CONJUGATE=102,
1559                MATOP_SET_SIZES=103,
1560                MATOP_SET_VALUES_ROW=104,
1561                MATOP_REAL_PART=105,
1562                MATOP_IMAG_PART=106,
1563                MATOP_GET_ROW_UTRIANGULAR=107,
1564                MATOP_RESTORE_ROW_UTRIANGULAR=108,
1565                MATOP_MATSOLVE=109,
1566                MATOP_GET_REDUNDANTMATRIX=110,
1567                MATOP_GET_ROW_MIN=111,
1568                MATOP_GET_COLUMN_VEC=112,
1569                MATOP_MISSING_DIAGONAL=113,
1570                MATOP_MATGETSEQNONZEROSTRUCTURE=114,
1571                MATOP_CREATE=115,
1572                MATOP_GET_GHOSTS=116,
1573                MATOP_GET_LOCALSUBMATRIX=117,
1574                MATOP_RESTORE_LOCALSUBMATRIX=118,
1575                MATOP_MULT_DIAGONAL_BLOCK=119,
1576                MATOP_HERMITIANTRANSPOSE=120,
1577                MATOP_MULTHERMITIANTRANSPOSE=121,
1578                MATOP_MULTHERMITIANTRANSPOSEADD=122,
1579                MATOP_GETMULTIPROCBLOCK=123,
1580                MATOP_GETCOLUMNNORMS=125,  /* note: value 124 is currently unassigned */
1581 	       MATOP_GET_SUBMATRICES_PARALLEL=128  /* note: values 126 and 127 are currently unassigned */
1582              } MatOperation;
1583 extern PetscErrorCode  MatHasOperation(Mat,MatOperation,PetscBool *);
1584 extern PetscErrorCode  MatShellSetOperation(Mat,MatOperation,void(*)(void));
1585 extern PetscErrorCode  MatShellGetOperation(Mat,MatOperation,void(**)(void));
1586 extern PetscErrorCode  MatShellSetContext(Mat,void*);
1587 
1588 /*
1589    Codes for matrices stored on disk. By default they are
1590    stored in a universal format. By changing the format with
1591    PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
1592    be stored in a way natural for the matrix, for example dense matrices
1593    would be stored as dense. Matrices stored this way may only be
1594    read into matrices of the same type.
1595 */
1596 #define MATRIX_BINARY_FORMAT_DENSE -1
1597 
1598 extern PetscErrorCode  MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
1599 extern PetscErrorCode  MatISGetLocalMat(Mat,Mat*);
1600 
1601 /*S
1602      MatNullSpace - Object that removes a null space from a vector, i.e.
1603          orthogonalizes the vector to a subspace
1604 
1605    Level: advanced
1606 
1607   Concepts: matrix; linear operator, null space
1608 
1609   Users manual sections:
1610 .   sec_singular
1611 
1612 .seealso:  MatNullSpaceCreate()
1613 S*/
1614 typedef struct _p_MatNullSpace* MatNullSpace;
1615 
1616 extern PetscErrorCode  MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
1617 extern PetscErrorCode  MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
1618 extern PetscErrorCode  MatNullSpaceDestroy(MatNullSpace*);
1619 extern PetscErrorCode  MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
1620 extern PetscErrorCode  MatNullSpaceAttach(Mat,MatNullSpace);
1621 extern PetscErrorCode  MatNullSpaceTest(MatNullSpace,Mat,PetscBool  *);
1622 extern PetscErrorCode  MatNullSpaceView(MatNullSpace,PetscViewer);
1623 
1624 extern PetscErrorCode  MatReorderingSeqSBAIJ(Mat,IS);
1625 extern PetscErrorCode  MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
1626 extern PetscErrorCode  MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
1627 extern PetscErrorCode  MatSeqBAIJInvertBlockDiagonal(Mat);
1628 
1629 extern PetscErrorCode  MatCreateMAIJ(Mat,PetscInt,Mat*);
1630 extern PetscErrorCode  MatMAIJRedimension(Mat,PetscInt,Mat*);
1631 extern PetscErrorCode  MatMAIJGetAIJ(Mat,Mat*);
1632 
1633 extern PetscErrorCode  MatComputeExplicitOperator(Mat,Mat*);
1634 
1635 extern PetscErrorCode  MatDiagonalScaleLocal(Mat,Vec);
1636 
1637 extern PetscErrorCode  MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
1638 extern PetscErrorCode  MatMFFDSetBase(Mat,Vec,Vec);
1639 extern PetscErrorCode  MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
1640 extern PetscErrorCode  MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
1641 extern PetscErrorCode  MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
1642 extern PetscErrorCode  MatMFFDAddNullSpace(Mat,MatNullSpace);
1643 extern PetscErrorCode  MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
1644 extern PetscErrorCode  MatMFFDResetHHistory(Mat);
1645 extern PetscErrorCode  MatMFFDSetFunctionError(Mat,PetscReal);
1646 extern PetscErrorCode  MatMFFDSetPeriod(Mat,PetscInt);
1647 extern PetscErrorCode  MatMFFDGetH(Mat,PetscScalar *);
1648 extern PetscErrorCode  MatMFFDSetOptionsPrefix(Mat,const char[]);
1649 extern PetscErrorCode  MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
1650 extern PetscErrorCode  MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);
1651 
1652 /*S
1653     MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
1654               Jacobian vector products
1655 
1656     Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure
1657 
1658            MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure
1659 
1660     Level: developer
1661 
1662 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
1663 S*/
1664 typedef struct _p_MatMFFD* MatMFFD;
1665 
1666 /*E
1667     MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function
1668 
1669    Level: beginner
1670 
1671 .seealso: MatMFFDSetType(), MatMFFDRegister()
1672 E*/
1673 #define MatMFFDType char*
1674 #define MATMFFD_DS  "ds"
1675 #define MATMFFD_WP  "wp"
1676 
1677 extern PetscErrorCode  MatMFFDSetType(Mat,const MatMFFDType);
1678 extern PetscErrorCode  MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));
1679 
1680 /*MC
1681    MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.
1682 
1683    Synopsis:
1684    PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))
1685 
1686    Not Collective
1687 
1688    Input Parameters:
1689 +  name_solver - name of a new user-defined compute-h module
1690 .  path - path (either absolute or relative) the library containing this solver
1691 .  name_create - name of routine to create method context
1692 -  routine_create - routine to create method context
1693 
1694    Level: developer
1695 
1696    Notes:
1697    MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.
1698 
1699    If dynamic libraries are used, then the fourth input argument (routine_create)
1700    is ignored.
1701 
1702    Sample usage:
1703 .vb
1704    MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
1705                "MyHCreate",MyHCreate);
1706 .ve
1707 
1708    Then, your solver can be chosen with the procedural interface via
1709 $     MatMFFDSetType(mfctx,"my_h")
1710    or at runtime via the option
1711 $     -snes_mf_type my_h
1712 
1713 .keywords: MatMFFD, register
1714 
1715 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
1716 M*/
1717 #if defined(PETSC_USE_DYNAMIC_LIBRARIES)
1718 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) /* drop the function pointer: with dynamic libraries it is ignored and the routine is resolved from the path/name strings (see docs above) */
1719 #else
1720 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) /* no dynamic loading: register the function pointer directly */
1721 #endif
1722 
1723 extern PetscErrorCode  MatMFFDRegisterAll(const char[]);
1724 extern PetscErrorCode  MatMFFDRegisterDestroy(void);
1725 extern PetscErrorCode  MatMFFDDSSetUmin(Mat,PetscReal);
1726 extern PetscErrorCode  MatMFFDWPSetComputeNormU(Mat,PetscBool );
1727 
1728 
1729 extern PetscErrorCode  PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
1730 extern PetscErrorCode  PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);
1731 
1732 /*
1733    PETSc interface to MUMPS
1734 */
1735 #ifdef PETSC_HAVE_MUMPS
1736 extern PetscErrorCode  MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
1737 #endif
1738 
1739 /*
1740    PETSc interface to SUPERLU
1741 */
1742 #ifdef PETSC_HAVE_SUPERLU
1743 extern PetscErrorCode  MatSuperluSetILUDropTol(Mat,PetscReal);
1744 #endif
1745 
1746 #if defined(PETSC_HAVE_CUSP)
1747 extern PetscErrorCode  MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
1748 extern PetscErrorCode  MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
1749 #endif
1750 
1751 /*
1752    PETSc interface to FFTW
1753 */
1754 #if defined(PETSC_HAVE_FFTW)
1755 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
1756 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
1757 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
1758 #endif
1759 
1760 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
1761 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
1762 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
1763 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
1764 extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
1765 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
1766 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);
1767 
1768 PETSC_EXTERN_CXX_END
1769 #endif
1770