1 /* 2 Include file for the matrix component of PETSc 3 */ 4 #ifndef __PETSCMAT_H 5 #define __PETSCMAT_H 6 #include "petscvec.h" 7 PETSC_EXTERN_CXX_BEGIN 8 9 /*S 10 Mat - Abstract PETSc matrix object 11 12 Level: beginner 13 14 Concepts: matrix; linear operator 15 16 .seealso: MatCreate(), MatType, MatSetType() 17 S*/ 18 typedef struct _p_Mat* Mat; 19 20 /*J 21 MatType - String with the name of a PETSc matrix or the creation function 22 with an optional dynamic library name, for example 23 http://www.mcs.anl.gov/petsc/lib.a:mymatcreate() 24 25 Level: beginner 26 27 .seealso: MatSetType(), Mat, MatSolverPackage 28 J*/ 29 #define MatType char* 30 #define MATSAME "same" 31 #define MATMAIJ "maij" 32 #define MATSEQMAIJ "seqmaij" 33 #define MATMPIMAIJ "mpimaij" 34 #define MATIS "is" 35 #define MATAIJ "aij" 36 #define MATSEQAIJ "seqaij" 37 #define MATSEQAIJPTHREAD "seqaijpthread" 38 #define MATAIJPTHREAD "aijpthread" 39 #define MATMPIAIJ "mpiaij" 40 #define MATAIJCRL "aijcrl" 41 #define MATSEQAIJCRL "seqaijcrl" 42 #define MATMPIAIJCRL "mpiaijcrl" 43 #define MATAIJCUSP "aijcusp" 44 #define MATSEQAIJCUSP "seqaijcusp" 45 #define MATMPIAIJCUSP "mpiaijcusp" 46 #define MATAIJPERM "aijperm" 47 #define MATSEQAIJPERM "seqaijperm" 48 #define MATMPIAIJPERM "mpiaijperm" 49 #define MATSHELL "shell" 50 #define MATDENSE "dense" 51 #define MATSEQDENSE "seqdense" 52 #define MATMPIDENSE "mpidense" 53 #define MATBAIJ "baij" 54 #define MATSEQBAIJ "seqbaij" 55 #define MATMPIBAIJ "mpibaij" 56 #define MATMPIADJ "mpiadj" 57 #define MATSBAIJ "sbaij" 58 #define MATSEQSBAIJ "seqsbaij" 59 #define MATMPISBAIJ "mpisbaij" 60 #define MATSEQBSTRM "seqbstrm" 61 #define MATMPIBSTRM "mpibstrm" 62 #define MATBSTRM "bstrm" 63 #define MATSEQSBSTRM "seqsbstrm" 64 #define MATMPISBSTRM "mpisbstrm" 65 #define MATSBSTRM "sbstrm" 66 #define MATDAAD "daad" 67 #define MATMFFD "mffd" 68 #define MATNORMAL "normal" 69 #define MATLRC "lrc" 70 #define MATSCATTER "scatter" 71 #define MATBLOCKMAT "blockmat" 72 #define 
MATCOMPOSITE "composite" 73 #define MATFFT "fft" 74 #define MATFFTW "fftw" 75 #define MATSEQCUFFT "seqcufft" 76 #define MATTRANSPOSEMAT "transpose" 77 #define MATSCHURCOMPLEMENT "schurcomplement" 78 #define MATPYTHON "python" 79 #define MATHYPRESTRUCT "hyprestruct" 80 #define MATHYPRESSTRUCT "hypresstruct" 81 #define MATSUBMATRIX "submatrix" 82 #define MATLOCALREF "localref" 83 #define MATNEST "nest" 84 #define MATIJ "ij" 85 86 /*J 87 MatSolverPackage - String with the name of a PETSc matrix solver type. 88 89 For example: "petsc" indicates what PETSc provides, "superlu" indicates either 90 SuperLU or SuperLU_Dist etc. 91 92 93 Level: beginner 94 95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType 96 J*/ 97 #define MatSolverPackage char* 98 #define MATSOLVERSPOOLES "spooles" 99 #define MATSOLVERSUPERLU "superlu" 100 #define MATSOLVERSUPERLU_DIST "superlu_dist" 101 #define MATSOLVERUMFPACK "umfpack" 102 #define MATSOLVERCHOLMOD "cholmod" 103 #define MATSOLVERESSL "essl" 104 #define MATSOLVERLUSOL "lusol" 105 #define MATSOLVERMUMPS "mumps" 106 #define MATSOLVERPASTIX "pastix" 107 #define MATSOLVERMATLAB "matlab" 108 #define MATSOLVERPETSC "petsc" 109 #define MATSOLVERPLAPACK "plapack" 110 #define MATSOLVERBAS "bas" 111 112 #define MATSOLVERBSTRM "bstrm" 113 #define MATSOLVERSBSTRM "sbstrm" 114 115 /*E 116 MatFactorType - indicates what type of factorization is requested 117 118 Level: beginner 119 120 Any additions/changes here MUST also be made in include/finclude/petscmat.h 121 122 .seealso: MatSolverPackage, MatGetFactor() 123 E*/ 124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; 125 extern const char *const MatFactorTypes[]; 126 127 extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*); 128 extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *); 129 extern PetscErrorCode MatFactorGetSolverPackage(Mat,const 
MatSolverPackage*); 130 extern PetscErrorCode MatGetFactorType(Mat,MatFactorType*); 131 132 /* Logging support */ 133 #define MAT_FILE_CLASSID 1211216 /* used to indicate matrices in binary files */ 134 extern PetscClassId MAT_CLASSID; 135 extern PetscClassId MAT_FDCOLORING_CLASSID; 136 extern PetscClassId MAT_TRANSPOSECOLORING_CLASSID; 137 extern PetscClassId MAT_PARTITIONING_CLASSID; 138 extern PetscClassId MAT_COARSEN_CLASSID; 139 extern PetscClassId MAT_NULLSPACE_CLASSID; 140 extern PetscClassId MATMFFD_CLASSID; 141 142 /*E 143 MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices() 144 or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate 145 that the input matrix is to be replaced with the converted matrix. 146 147 Level: beginner 148 149 Any additions/changes here MUST also be made in include/finclude/petscmat.h 150 151 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert() 152 E*/ 153 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; 154 155 /*E 156 MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices() 157 include the matrix values. Currently it is only used by MatGetSeqNonzerostructure(). 
158 159 Level: beginner 160 161 .seealso: MatGetSeqNonzerostructure() 162 E*/ 163 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; 164 165 extern PetscErrorCode MatInitializePackage(const char[]); 166 167 extern PetscErrorCode MatCreate(MPI_Comm,Mat*); 168 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A) 169 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A) 170 extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); 171 extern PetscErrorCode MatSetType(Mat,const MatType); 172 extern PetscErrorCode MatSetFromOptions(Mat); 173 extern PetscErrorCode MatRegisterAll(const char[]); 174 extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat)); 175 extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]); 176 extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); 177 extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]); 178 extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]); 179 180 /*MC 181 MatRegisterDynamic - Adds a new matrix type 182 183 Synopsis: 184 PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat)) 185 186 Not Collective 187 188 Input Parameters: 189 + name - name of a new user-defined matrix type 190 . path - path (either absolute or relative) the library containing this solver 191 . name_create - name of routine to create method context 192 - routine_create - routine to create method context 193 194 Notes: 195 MatRegisterDynamic() may be called multiple times to add several user-defined solvers. 196 197 If dynamic libraries are used, then the fourth input argument (routine_create) 198 is ignored. 
199 200 Sample usage: 201 .vb 202 MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a, 203 "MyMatCreate",MyMatCreate); 204 .ve 205 206 Then, your solver can be chosen with the procedural interface via 207 $ MatSetType(Mat,"my_mat") 208 or at runtime via the option 209 $ -mat_type my_mat 210 211 Level: advanced 212 213 Notes: ${PETSC_ARCH} occuring in pathname will be replaced with appropriate values. 214 If your function is not being put into a shared library then use VecRegister() instead 215 216 .keywords: Mat, register 217 218 .seealso: MatRegisterAll(), MatRegisterDestroy() 219 220 M*/ 221 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) 223 #else 224 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) 225 #endif 226 227 extern PetscBool MatRegisterAllCalled; 228 extern PetscFList MatList; 229 extern PetscFList MatColoringList; 230 extern PetscFList MatPartitioningList; 231 extern PetscFList MatCoarsenList; 232 233 /*E 234 MatStructure - Indicates if the matrix has the same nonzero structure 235 236 Level: beginner 237 238 Any additions/changes here MUST also be made in include/finclude/petscmat.h 239 240 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators() 241 E*/ 242 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure; 243 244 extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*); 245 extern PetscErrorCode MatCreateDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*); 246 extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A) 248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A) 249 
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A))
/* Parallel AIJ (compressed sparse row) creation, with C++ polymorphic convenience wrappers */
extern PetscErrorCode MatCreateAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*);

/* Block AIJ (BAIJ) creation */
extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))
extern PetscErrorCode MatCreateBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*);

/* Adjacency and symmetric block AIJ (SBAIJ) creation */
extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*);
extern PetscErrorCode MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A))
PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A))

extern PetscErrorCode MatCreateSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateSBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *);
extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode MatXAIJSetPreallocation(Mat,PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*);

/* Special matrix types: shell (user-defined operators), normal, low-rank correction, IS, CRL, BSTRM */
extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*);
PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A)
PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A)
extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*);
extern PetscErrorCode MatCreateNormal(Mat,Mat*);
PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A)
extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*);
extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*);
extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);

/* Scatter, block, and composite matrices */
extern PetscErrorCode MatCreateScatter(MPI_Comm,VecScatter,Mat*);
extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter);
extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*);
extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*);
extern PetscErrorCode MatCompositeAddMat(Mat,Mat);
extern PetscErrorCode MatCompositeMerge(Mat);
extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*);
typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType;
extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType);

extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*);
extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*);

extern PetscErrorCode MatCreateTranspose(Mat,Mat*);
extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS);
extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*);

extern PetscErrorCode MatPythonSetType(Mat,const char[]);

extern PetscErrorCode MatSetUp(Mat);
extern PetscErrorCode MatDestroy(Mat*);

extern PetscErrorCode MatConjugate(Mat);
extern PetscErrorCode MatRealPart(Mat);
extern PetscErrorCode MatImaginaryPart(Mat);
extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*);
extern PetscErrorCode MatGetTrace(Mat,PetscScalar*);
extern PetscErrorCode MatInvertBlockDiagonal(Mat,PetscScalar **);

/* ------------------------------------------------------------*/
extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]);
extern PetscErrorCode MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]);
extern PetscErrorCode MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]);

/*S
     MatStencil - Data structure (C struct) for storing information about a single row or
        column of a matrix as indexed on an associated grid.

     Fortran usage is different, see MatSetValuesStencil() for details.

   Level: beginner

  Concepts: matrix; linear operator

.seealso:  MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil()
S*/
typedef struct {
  PetscInt k,j,i,c;
} MatStencil;

extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt);

extern PetscErrorCode MatSetColoring(Mat,ISColoring);
extern PetscErrorCode MatSetValuesAdic(Mat,void*);
extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*);

/*E
    MatAssemblyType - Indicates if the matrix is now to be used, or if you plan
     to continue to add values to it

    Level: beginner

.seealso: MatAssemblyBegin(), MatAssemblyEnd()
E*/
typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType;
extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType);
extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType);
extern PetscErrorCode MatAssembled(Mat,PetscBool *);



/*E
    MatOption - Options that may be set for a matrix and its behavior or storage

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatSetOption()
E*/
typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS,
              MAT_SYMMETRIC,
              MAT_STRUCTURALLY_SYMMETRIC,
              MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES,
              MAT_NEW_NONZERO_LOCATION_ERR,
              MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE,
              MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES,
              MAT_USE_INODES,
              MAT_HERMITIAN,
              MAT_SYMMETRY_ETERNAL,
              MAT_CHECK_COMPRESSED_ROW,
              MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR,
              MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR,
              MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS,
              NUM_MAT_OPTIONS} MatOption;
extern const char *MatOptions[];
extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool );
extern PetscErrorCode MatGetType(Mat,const MatType*);
PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t)

/* Entry and row/column access */
extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]);
extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatGetRowUpperTriangular(Mat);
extern PetscErrorCode MatRestoreRowUpperTriangular(Mat);
extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]);
extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt);
extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]);
PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a)
extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]);
extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *);
PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a)
extern PetscErrorCode MatSetBlockSize(Mat,PetscInt);


/* Matrix-vector products and related operations */
extern PetscErrorCode MatMult(Mat,Vec,Vec);
extern PetscErrorCode MatMultDiagonalBlock(Mat,Vec,Vec);
extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t)
extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *);
extern PetscErrorCode MatMultTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec);
extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec);
extern PetscErrorCode MatMatSolve(Mat,Mat,Mat);

/*E
    MatDuplicateOption - Indicates if a duplicated sparse matrix should have
  its numerical values copied over or just its nonzero structure.

    Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

$   MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix
$                               this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you
$                               have several matrices with the same nonzero pattern.

.seealso: MatDuplicate()
E*/
typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption;

extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*);
PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a)
extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*);
PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a)
PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a)


/* Copying, viewing, and symmetry queries */
extern PetscErrorCode MatCopy(Mat,Mat,MatStructure);
extern PetscErrorCode MatView(Mat,PetscViewer);
extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t)
PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *);
PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t)
extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *);
PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t)
extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *);
extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *);
extern PetscErrorCode MatLoad(Mat, PetscViewer);

/* Direct access to compressed row/column index structure */
extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *);
extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *);
521 522 /*S 523 MatInfo - Context of matrix information, used with MatGetInfo() 524 525 In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE 526 527 Level: intermediate 528 529 Concepts: matrix^nonzero information 530 531 .seealso: MatGetInfo(), MatInfoType 532 S*/ 533 typedef struct { 534 PetscLogDouble block_size; /* block size */ 535 PetscLogDouble nz_allocated,nz_used,nz_unneeded; /* number of nonzeros */ 536 PetscLogDouble memory; /* memory allocated */ 537 PetscLogDouble assemblies; /* number of matrix assemblies called */ 538 PetscLogDouble mallocs; /* number of mallocs during MatSetValues() */ 539 PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */ 540 PetscLogDouble factor_mallocs; /* number of mallocs during factorization */ 541 } MatInfo; 542 543 /*E 544 MatInfoType - Indicates if you want information about the local part of the matrix, 545 the entire parallel matrix or the maximum over all the local parts. 546 547 Level: beginner 548 549 Any additions/changes here MUST also be made in include/finclude/petscmat.h 550 551 .seealso: MatGetInfo(), MatInfo 552 E*/ 553 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; 554 extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*); 555 extern PetscErrorCode MatGetDiagonal(Mat,Vec); 556 extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]); 557 extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]); 558 extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]); 559 extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]); 560 extern PetscErrorCode MatGetRowSum(Mat,Vec); 561 extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*); 562 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t) 563 extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*); 564 extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *); 565 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t) 566 extern 
PetscErrorCode MatDiagonalScale(Mat,Vec,Vec); 567 extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode); 568 extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *); 569 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t) 570 extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *); 571 extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *); 572 extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *); 573 extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *); 574 575 extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *); 576 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n) 577 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *); 578 extern PetscErrorCode MatZeroEntries(Mat); 579 extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 580 extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec); 581 extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec); 582 extern PetscErrorCode MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec); 583 extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 584 extern PetscErrorCode MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec); 585 586 extern PetscErrorCode MatUseScaledForm(Mat,PetscBool ); 587 extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec); 588 extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec); 589 590 extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*); 591 extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*); 592 extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*); 593 extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**); 594 extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*); 595 extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**); 596 597 extern PetscErrorCode 
MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 598 extern PetscErrorCode MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 599 extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]); 600 extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *); 601 extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*); 602 extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*); 603 extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*); 604 extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*); 605 606 extern PetscErrorCode MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*); 607 extern PetscErrorCode MatMergeSymbolic(MPI_Comm,Mat,PetscInt,Mat*); 608 extern PetscErrorCode MatMergeNumeric(MPI_Comm,Mat,PetscInt,Mat); 609 extern PetscErrorCode MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*); 610 extern PetscErrorCode MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*); 611 extern PetscErrorCode MatMerge_SeqsToMPINumeric(Mat,Mat); 612 extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*); 613 extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*); 614 extern PetscErrorCode MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*); 615 #if defined (PETSC_USE_CTABLE) 616 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *); 617 #else 618 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *); 619 #endif 620 extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]); 621 622 extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt); 623 624 extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 625 extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 626 extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat); 627 628 extern PetscErrorCode MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*); 629 extern PetscErrorCode 
MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*); 630 extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat); 631 extern PetscErrorCode MatRARt(Mat,Mat,MatReuse,PetscReal,Mat*); 632 extern PetscErrorCode MatRARtSymbolic(Mat,Mat,PetscReal,Mat*); 633 extern PetscErrorCode MatRARtNumeric(Mat,Mat,Mat); 634 635 extern PetscErrorCode MatTransposeMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 636 extern PetscErrorCode MatTransposetMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 637 extern PetscErrorCode MatTransposetMatMultNumeric(Mat,Mat,Mat); 638 extern PetscErrorCode MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*); 639 extern PetscErrorCode MatMatTransposeMultSymbolic(Mat,Mat,PetscReal,Mat*); 640 extern PetscErrorCode MatMatTransposeMultNumeric(Mat,Mat,Mat); 641 642 extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure); 643 extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure); 644 645 extern PetscErrorCode MatScale(Mat,PetscScalar); 646 extern PetscErrorCode MatShift(Mat,PetscScalar); 647 648 extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 649 extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 650 extern PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 651 extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 652 extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 653 extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 654 extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 655 extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 656 extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 657 extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const 
PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 658 659 extern PetscErrorCode MatStashSetInitialSize(Mat,PetscInt,PetscInt); 660 extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*); 661 662 extern PetscErrorCode MatInterpolate(Mat,Vec,Vec); 663 extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec); 664 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 665 extern PetscErrorCode MatRestrict(Mat,Vec,Vec); 666 extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*); 667 extern PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*); 668 extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*); 669 extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*); 670 671 /*MC 672 MatSetValue - Set a single entry into a matrix. 673 674 Not collective 675 676 Input Parameters: 677 + m - the matrix 678 . row - the row location of the entry 679 . col - the column location of the entry 680 . value - the value to insert 681 - mode - either INSERT_VALUES or ADD_VALUES 682 683 Notes: 684 For efficiency one should use MatSetValues() and set several or many 685 values simultaneously if possible. 686 687 Level: beginner 688 689 .seealso: MatSetValues(), MatSetValueLocal() 690 M*/ 691 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);} 692 693 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);} 694 695 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);} 696 697 /*MC 698 MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per 699 row in a matrix providing the data that one can use to correctly preallocate the matrix. 
700 701 Synopsis: 702 PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 703 704 Collective on MPI_Comm 705 706 Input Parameters: 707 + comm - the communicator that will share the eventually allocated matrix 708 . nrows - the number of LOCAL rows in the matrix 709 - ncols - the number of LOCAL columns in the matrix 710 711 Output Parameters: 712 + dnz - the array that will be passed to the matrix preallocation routines 713 - ozn - the other array passed to the matrix preallocation routines 714 715 716 Level: intermediate 717 718 Notes: 719 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 720 721 Do not malloc or free dnz and onz, that is handled internally by these routines 722 723 Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices) 724 725 This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize(). 
726 727 Concepts: preallocation^Matrix 728 729 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 730 MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal() 731 M*/ 732 #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \ 733 { \ 734 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \ 735 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 736 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 737 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 738 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\ 739 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 740 741 /*MC 742 MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per 743 row in a matrix providing the data that one can use to correctly preallocate the matrix. 744 745 Synopsis: 746 PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 747 748 Collective on MPI_Comm 749 750 Input Parameters: 751 + comm - the communicator that will share the eventually allocated matrix 752 . nrows - the number of LOCAL rows in the matrix 753 - ncols - the number of LOCAL columns in the matrix 754 755 Output Parameters: 756 + dnz - the array that will be passed to the matrix preallocation routines 757 - ozn - the other array passed to the matrix preallocation routines 758 759 760 Level: intermediate 761 762 Notes: 763 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
764 765 Do not malloc or free dnz and onz, that is handled internally by these routines 766 767 This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize(). 768 769 Concepts: preallocation^Matrix 770 771 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 772 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 773 M*/ 774 #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \ 775 { \ 776 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \ 777 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 778 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 779 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 780 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\ 781 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 782 783 /*MC 784 MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 785 inserted using a local number of the rows and columns 786 787 Synopsis: 788 PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 789 790 Not Collective 791 792 Input Parameters: 793 + map - the row mapping from local numbering to global numbering 794 . nrows - the number of rows indicated 795 . rows - the indices of the rows 796 . cmap - the column mapping from local to global numbering 797 . ncols - the number of columns in the matrix 798 . cols - the columns indicated 799 . 
dnz - the array that will be passed to the matrix preallocation routines 800 - ozn - the other array passed to the matrix preallocation routines 801 802 803 Level: intermediate 804 805 Notes: 806 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 807 808 Do not malloc or free dnz and onz, that is handled internally by these routines 809 810 Concepts: preallocation^Matrix 811 812 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 813 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 814 M*/ 815 #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \ 816 {\ 817 PetscInt __l;\ 818 _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\ 819 _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\ 820 for (__l=0;__l<nrows;__l++) {\ 821 _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 822 }\ 823 } 824 825 /*MC 826 MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 827 inserted using a local number of the rows and columns 828 829 Synopsis: 830 PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 831 832 Not Collective 833 834 Input Parameters: 835 + map - the mapping between local numbering and global numbering 836 . nrows - the number of rows indicated 837 . rows - the indices of the rows 838 . ncols - the number of columns in the matrix 839 . cols - the columns indicated 840 . 
dnz - the array that will be passed to the matrix preallocation routines 841 - ozn - the other array passed to the matrix preallocation routines 842 843 844 Level: intermediate 845 846 Notes: 847 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 848 849 Do not malloc or free dnz and onz that is handled internally by these routines 850 851 Concepts: preallocation^Matrix 852 853 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 854 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 855 M*/ 856 #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\ 857 {\ 858 PetscInt __l;\ 859 _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\ 860 _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\ 861 for (__l=0;__l<nrows;__l++) {\ 862 _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 863 }\ 864 } 865 866 /*MC 867 MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 868 inserted using a local number of the rows and columns 869 870 Synopsis: 871 PetscErrorCode MatPreallocateSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 872 873 Not Collective 874 875 Input Parameters: 876 + row - the row 877 . ncols - the number of columns in the matrix 878 - cols - the columns indicated 879 880 Output Parameters: 881 + dnz - the array that will be passed to the matrix preallocation routines 882 - ozn - the other array passed to the matrix preallocation routines 883 884 885 Level: intermediate 886 887 Notes: 888 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
889 890 Do not malloc or free dnz and onz that is handled internally by these routines 891 892 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 893 894 Concepts: preallocation^Matrix 895 896 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 897 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 898 M*/ 899 #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\ 900 { PetscInt __i; \ 901 if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\ 902 if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\ 903 for (__i=0; __i<nc; __i++) {\ 904 if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \ 905 else dnz[row - __rstart]++;\ 906 }\ 907 } 908 909 /*MC 910 MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 911 inserted using a local number of the rows and columns 912 913 Synopsis: 914 PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 915 916 Not Collective 917 918 Input Parameters: 919 + nrows - the number of rows indicated 920 . rows - the indices of the rows 921 . ncols - the number of columns in the matrix 922 . cols - the columns indicated 923 . dnz - the array that will be passed to the matrix preallocation routines 924 - ozn - the other array passed to the matrix preallocation routines 925 926 927 Level: intermediate 928 929 Notes: 930 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
931 932 Do not malloc or free dnz and onz that is handled internally by these routines 933 934 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 935 936 Concepts: preallocation^Matrix 937 938 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 939 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 940 M*/ 941 #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\ 942 { PetscInt __i; \ 943 for (__i=0; __i<nc; __i++) {\ 944 if (cols[__i] >= __end) onz[row - __rstart]++; \ 945 else if (cols[__i] >= row) dnz[row - __rstart]++;\ 946 }\ 947 } 948 949 /*MC 950 MatPreallocateLocation - An alternative to MatPreallocationSet() that puts the nonzero locations into the matrix if it exists 951 952 Synopsis: 953 PetscErrorCode MatPreallocateLocations(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 954 955 Not Collective 956 957 Input Parameters: 958 . A - matrix 959 . row - row where values exist (must be local to this process) 960 . ncols - number of columns 961 . cols - columns with nonzeros 962 . dnz - the array that will be passed to the matrix preallocation routines 963 - ozn - the other array passed to the matrix preallocation routines 964 965 966 Level: intermediate 967 968 Notes: 969 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 970 971 Do not malloc or free dnz and onz that is handled internally by these routines 972 973 This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines. 
974 975 Concepts: preallocation^Matrix 976 977 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 978 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 979 M*/ 980 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);} 981 982 983 /*MC 984 MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per 985 row in a matrix providing the data that one can use to correctly preallocate the matrix. 986 987 Synopsis: 988 PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz) 989 990 Collective on MPI_Comm 991 992 Input Parameters: 993 + dnz - the array that was be passed to the matrix preallocation routines 994 - ozn - the other array passed to the matrix preallocation routines 995 996 997 Level: intermediate 998 999 Notes: 1000 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 1001 1002 Do not malloc or free dnz and onz that is handled internally by these routines 1003 1004 This is a MACRO not a function because it closes the { started in MatPreallocateInitialize(). 
1005 1006 Concepts: preallocation^Matrix 1007 1008 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 1009 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 1010 M*/ 1011 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);} 1012 1013 1014 1015 /* Routines unique to particular data structures */ 1016 extern PetscErrorCode MatShellGetContext(Mat,void *); 1017 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t) 1018 1019 extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*); 1020 extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *); 1021 1022 extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]); 1023 extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]); 1024 extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1025 extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1026 extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1027 extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool); 1028 1029 #define MAT_SKIP_ALLOCATION -4 1030 1031 extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1032 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1033 extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1034 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1035 extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]); 1036 
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz)) 1037 1038 extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1039 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz)) 1040 extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1041 extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1042 extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []); 1043 extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1044 extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 1045 extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1046 extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]); 1047 extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]); 1048 extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]); 1049 extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1050 extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1051 extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void)); 1052 extern PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat,Mat*); 1053 1054 extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt); 1055 extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*); 1056 1057 extern PetscErrorCode MatStoreValues(Mat); 1058 extern PetscErrorCode MatRetrieveValues(Mat); 1059 1060 extern PetscErrorCode MatDAADSetCtx(Mat,void*); 1061 1062 extern PetscErrorCode MatFindNonzeroRows(Mat,IS*); 1063 /* 1064 These routines are 
not usually accessed directly, rather solving is 1065 done through the KSP and PC interfaces. 1066 */ 1067 1068 /*J 1069 MatOrderingType - String with the name of a PETSc matrix ordering or the creation function 1070 with an optional dynamic library name, for example 1071 http://www.mcs.anl.gov/petsc/lib.a:orderingcreate() 1072 1073 Level: beginner 1074 1075 Cannot use const because the PC objects manipulate the string 1076 1077 .seealso: MatGetOrdering() 1078 J*/ 1079 #define MatOrderingType char* 1080 #define MATORDERINGNATURAL "natural" 1081 #define MATORDERINGND "nd" 1082 #define MATORDERING1WD "1wd" 1083 #define MATORDERINGRCM "rcm" 1084 #define MATORDERINGQMD "qmd" 1085 #define MATORDERINGROWLENGTH "rowlength" 1086 #define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ 1087 1088 extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*); 1089 extern PetscErrorCode MatGetOrderingList(PetscFList *list); 1090 extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*)); 1091 1092 /*MC 1093 MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 1094 1095 Synopsis: 1096 PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering)) 1097 1098 Not Collective 1099 1100 Input Parameters: 1101 + sname - name of ordering (for example MATORDERINGND) 1102 . path - location of library where creation routine is 1103 . name - name of function that creates the ordering type,a string 1104 - function - function pointer that creates the ordering 1105 1106 Level: developer 1107 1108 If dynamic libraries are used, then the fourth input argument (function) 1109 is ignored. 
1110 1111 Sample usage: 1112 .vb 1113 MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a, 1114 "MyOrder",MyOrder); 1115 .ve 1116 1117 Then, your partitioner can be chosen with the procedural interface via 1118 $ MatOrderingSetType(part,"my_order) 1119 or at runtime via the option 1120 $ -pc_factor_mat_ordering_type my_order 1121 1122 ${PETSC_ARCH} occuring in pathname will be replaced with appropriate values. 1123 1124 .keywords: matrix, ordering, register 1125 1126 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll() 1127 M*/ 1128 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1129 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) 1130 #else 1131 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d) 1132 #endif 1133 1134 extern PetscErrorCode MatOrderingRegisterDestroy(void); 1135 extern PetscErrorCode MatOrderingRegisterAll(const char[]); 1136 extern PetscBool MatOrderingRegisterAllCalled; 1137 extern PetscFList MatOrderingList; 1138 1139 extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS); 1140 1141 /*S 1142 MatFactorShiftType - Numeric Shift. 1143 1144 Level: beginner 1145 1146 S*/ 1147 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; 1148 extern const char *MatFactorShiftTypes[]; 1149 1150 /*S 1151 MatFactorInfo - Data passed into the matrix factorization routines 1152 1153 In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use 1154 $ MatFactorInfo info(MAT_FACTORINFO_SIZE) 1155 1156 Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC. 1157 1158 You can use MatFactorInfoInitialize() to set default values. 
1159 1160 Level: developer 1161 1162 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 1163 MatFactorInfoInitialize() 1164 1165 S*/ 1166 typedef struct { 1167 PetscReal diagonal_fill; /* force diagonal to fill in if initially not filled */ 1168 PetscReal usedt; 1169 PetscReal dt; /* drop tolerance */ 1170 PetscReal dtcol; /* tolerance for pivoting */ 1171 PetscReal dtcount; /* maximum nonzeros to be allowed per row */ 1172 PetscReal fill; /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */ 1173 PetscReal levels; /* ICC/ILU(levels) */ 1174 PetscReal pivotinblocks; /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0 1175 factorization may be faster if do not pivot */ 1176 PetscReal zeropivot; /* pivot is called zero if less than this */ 1177 PetscReal shifttype; /* type of shift added to matrix factor to prevent zero pivots */ 1178 PetscReal shiftamount; /* how large the shift is */ 1179 } MatFactorInfo; 1180 1181 extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*); 1182 extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*); 1183 extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1184 extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*); 1185 extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*); 1186 extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*); 1187 extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1188 extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1189 extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1190 extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*); 1191 extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*); 1192 extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*); 1193 extern 
PetscErrorCode MatSolve(Mat,Vec,Vec);
extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatSolves(Mat,Vecs,Vecs);

extern PetscErrorCode MatSetUnfactored(Mat);

/*E
    MatSORType - What type of (S)SOR to perform

    Level: beginner

   May be bitwise ORd together

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

   MatSORType may be bitwise ORd together, so do not change the numbers

.seealso: MatSOR()
E*/
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);

/*
    These routines are for efficiently computing Jacobians via finite differences.
*/

/*J
    MatColoringType - String with the name of a PETSc matrix coloring or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

   Level: beginner

.seealso: MatGetColoring()
J*/
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural"
#define MATCOLORINGSL      "sl"
#define MATCOLORINGLF      "lf"
#define MATCOLORINGID      "id"

extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*);
extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));

/*MC
   MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
                               matrix package.

   Synopsis:
   PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))

   Not Collective

   Input Parameters:
+  sname - name of Coloring (for example MATCOLORINGSL)
.  path - location of library where creation routine is
.  name - name of function that creates the Coloring type, a string
-  function - function pointer that creates the coloring

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyColor",MyColor);
.ve

   Then, your coloring can be chosen with the procedural interface via
$     MatColoringSetType(part,"my_color")
   or at runtime via the option
$     -mat_coloring_type my_color

   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

.keywords: matrix, Coloring, register

.seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif

extern PetscBool MatColoringRegisterAllCalled;

extern PetscErrorCode MatColoringRegisterAll(const char[]);
extern PetscErrorCode MatColoringRegisterDestroy(void);
extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);

/*S
     MatFDColoring - Object for computing a sparse Jacobian via finite differences
        and coloring

   Level: beginner

  Concepts: coloring, sparse Jacobian, finite differences

.seealso: MatFDColoringCreate()
S*/
typedef struct _p_MatFDColoring* MatFDColoring;

extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*);
extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer);
extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring);
extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec);
extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);

/*S
     MatTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring

   Level: beginner

  Concepts: coloring, sparse matrix product

.seealso:
MatTransposeColoringCreate()
S*/
typedef struct _p_MatTransposeColoring* MatTransposeColoring;

extern PetscErrorCode MatTransposeColoringCreate(Mat,ISColoring,MatTransposeColoring *);
extern PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring,Mat,Mat);
extern PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring,Mat,Mat);
extern PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*);

/*
    These routines are for partitioning matrices: currently used only
    for adjacency matrix, MatCreateMPIAdj().
*/

/*S
     MatPartitioning - Object for managing the partitioning of a matrix or graph

   Level: beginner

  Concepts: partitioning

.seealso: MatPartitioningCreate(), MatPartitioningType
S*/
typedef struct _p_MatPartitioning* MatPartitioning;

/*J
    MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioning
J*/
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGPTSCOTCH "ptscotch"


extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt);
extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat);
extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*);
extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*);

extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));

/*MC
   MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
   matrix package.

   Synopsis:
   PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))

   Not Collective

   Input Parameters:
+  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
.  path - location of library where creation routine is
.  name - name of function that creates the partitioning type, a string
-  function - function pointer that creates the partitioning type

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyPartCreate",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif

extern PetscBool MatPartitioningRegisterAllCalled;

extern PetscErrorCode MatPartitioningRegisterAll(const char[]);
extern PetscErrorCode MatPartitioningRegisterDestroy(void);

extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer);
extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning);
extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);

extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);

/* Options for the Chaco partitioning package; string names live in the matching *Types[] arrays */
typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
extern const char *MPChacoGlobalTypes[];
typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
extern const char *MPChacoLocalTypes[];
typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
extern const char *MPChacoEigenTypes[];

extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
extern PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
extern PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode
MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
extern PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
extern PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);

/* Global method names accepted by the Party partitioning package */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning,const char*);
/* Local method names accepted by the Party partitioning package */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning,const char*);
extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);

typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
extern const char *MPPTScotchStrategyTypes[];

extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);

/*
    These routines are for coarsening matrices:
*/

/*S
     MatCoarsen - Object for managing the coarsening of a graph (symmetric matrix)

   Level: beginner

  Concepts: coarsen

.seealso: MatCoarsenCreate(), MatCoarsenType
S*/
typedef struct _p_MatCoarsen* MatCoarsen;

/*J
    MatCoarsenType - String with the name of a PETSc matrix coarsen or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coarsencreate()

   Level: beginner

.seealso: MatCoarsenCreate(), MatCoarsen
J*/
#define MatCoarsenType char*
#define MATCOARSENMIS  "mis"
#define MATCOARSENHEM  "hem"

extern PetscErrorCode MatCoarsenCreate(MPI_Comm,MatCoarsen*);
extern PetscErrorCode MatCoarsenSetType(MatCoarsen,const MatCoarsenType);
extern PetscErrorCode MatCoarsenSetAdjacency(MatCoarsen,Mat);
extern PetscErrorCode MatCoarsenSetGreedyOrdering(MatCoarsen,const IS);
extern PetscErrorCode MatCoarsenSetStrictAggs(MatCoarsen,PetscBool);
extern PetscErrorCode MatCoarsenSetVerbose(MatCoarsen,PetscInt);
extern PetscErrorCode MatCoarsenGetMISAggLists( MatCoarsen, IS*, IS* );
extern PetscErrorCode MatCoarsenApply(MatCoarsen);
extern PetscErrorCode MatCoarsenDestroy(MatCoarsen*);

extern PetscErrorCode MatCoarsenRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatCoarsen));

/*MC
   MatCoarsenRegisterDynamic - Adds a new sparse matrix coarsen to the
   matrix package.

   Synopsis:
   PetscErrorCode MatCoarsenRegisterDynamic(const char *name_coarsen,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatCoarsen))

   Not Collective

   Input Parameters:
+  sname - name of coarsen (for example MATCOARSENMIS)
.  path - location of library where creation routine is
.  name - name of function that creates the coarsen type, a string
-  function - function pointer that creates the coarsen type

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatCoarsenRegisterDynamic("my_agg",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyAggCreate",MyAggCreate);
.ve

   Then, your aggregator can be chosen with the procedural interface via
$     MatCoarsenSetType(agg,"my_agg")
   or at runtime via the option
$     -mat_coarsen_type my_agg

   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

.keywords: matrix, coarsen, register

.seealso: MatCoarsenRegisterDestroy(), MatCoarsenRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,0)
#else
#define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,d)
#endif

extern PetscBool MatCoarsenRegisterAllCalled;

extern PetscErrorCode MatCoarsenRegisterAll(const char[]);
extern PetscErrorCode MatCoarsenRegisterDestroy(void);

extern PetscErrorCode MatCoarsenView(MatCoarsen,PetscViewer);
extern PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen);
extern PetscErrorCode MatCoarsenGetType(MatCoarsen,const MatCoarsenType*);


extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);

/*
   If you add entries here you must also add them to finclude/petscmat.h
*/
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               /* note: the numbering has deliberate gaps (124, 126, 127) -- do not renumber */
               MATOP_GETCOLUMNNORMS=125,
               MATOP_GET_SUBMATRICES_PARALLEL=128,
               MATOP_SET_VALUES_BATCH=129,
               MATOP_TRANSPOSEMATMULT=130,
               MATOP_TRANSPOSEMATMULT_SYMBOLIC=131,
               MATOP_TRANSPOSEMATMULT_NUMERIC=132,
               MATOP_TRANSPOSECOLORING_CREATE=133,
               MATOP_TRANSCOLORING_APPLY_SPTODEN=134,
               MATOP_TRANSCOLORING_APPLY_DENTOSP=135,
               MATOP_RARt=136,
               MATOP_RARt_SYMBOLIC=137,
               MATOP_RARt_NUMERIC=138
             } MatOperation;
extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *);
extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void));
extern PetscErrorCode
MatShellGetOperation(Mat,MatOperation,void(**)(void));
extern PetscErrorCode MatShellSetContext(Mat,void*);

/*
   Codes for matrices stored on disk. By default they are
   stored in a universal format. By changing the format with
   PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
   be stored in a way natural for the matrix, for example dense matrices
   would be stored as dense. Matrices stored this way may only be
   read into matrices of the same type.
*/
#define MATRIX_BINARY_FORMAT_DENSE -1

extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode MatISGetLocalMat(Mat,Mat*);
extern PetscErrorCode MatISSetLocalMat(Mat,Mat);

/*S
     MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace

   Level: advanced

  Concepts: matrix; linear operator, null space

  Users manual sections:
.   sec_singular

.seealso: MatNullSpaceCreate()
S*/
typedef struct _p_MatNullSpace* MatNullSpace;

extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*);
extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*);
extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*);
extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*);
extern PetscErrorCode MatGetNullSpace(Mat, MatNullSpace *);
extern PetscErrorCode MatSetNullSpace(Mat,MatNullSpace);
extern PetscErrorCode MatSetNearNullSpace(Mat,MatNullSpace);
extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *);
extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer);

extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS);
extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *);
extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat);

extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMAIJGetAIJ(Mat,Mat*);

extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*);

extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec);

/* Matrix-free finite-difference (MFFD) matrices; see the MatMFFD type below */
extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec);
extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace);
extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
extern PetscErrorCode MatMFFDResetHHistory(Mat);
extern PetscErrorCode MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *);
extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);

/*S
    MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
              Jacobian vector products

      Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

             MatMFFD*() methods actually take the Mat as their first argument.
Not a MatMFFD data structure

      Level: developer

.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
S*/
typedef struct _p_MatMFFD* MatMFFD;

/*J
    MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function

   Level: beginner

.seealso: MatMFFDSetType(), MatMFFDRegister()
J*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"
#define MATMFFD_WP  "wp"

extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType);
extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));

/*MC
   MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.

   Synopsis:
   PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined compute-h module
.  path - path (either absolute or relative) the library containing this solver
.  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Level: developer

   Notes:
   MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.

   Sample usage:
.vb
   MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyHCreate",MyHCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     MatMFFDSetType(mfctx,"my_h")
   or at runtime via the option
$     -snes_mf_type my_h

.keywords: MatMFFD, register

.seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode MatMFFDRegisterAll(const char[]);
extern PetscErrorCode MatMFFDRegisterDestroy(void);
extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal);
extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool );


extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);

/*
   PETSc interface to MUMPS
*/
#ifdef PETSC_HAVE_MUMPS
extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
#endif

/*
   PETSc interface to SUPERLU
*/
#ifdef PETSC_HAVE_SUPERLU
extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal);
#endif

#if defined(PETSC_HAVE_CUSP)
extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
#endif

/*
   PETSc interface to FFTW
*/
#if defined(PETSC_HAVE_FFTW)
extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
#endif

extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatNestGetISs(Mat,IS[],IS[]);
extern PetscErrorCode MatNestGetLocalISs(Mat,IS[],IS[]);
extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);

/*
 MatIJ:
 An unweighted directed pseudograph
 An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
 A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
 by mapping the indices to the indices connected to them by the (pseudo)graph edges.
*/
typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType;
extern PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
extern PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
extern PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
extern PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
extern PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
extern PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
extern PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
extern PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetSupport(Mat, PetscInt *, PetscInt **);
extern PetscErrorCode MatIJGetSupportIS(Mat, IS *);
extern PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
extern PetscErrorCode MatIJGetImageIS(Mat, IS *);
extern PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);

extern PetscErrorCode MatIJBinRenumber(Mat, Mat*);

extern PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);

PETSC_EXTERN_CXX_END
#endif