1 /* 2 Include file for the matrix component of PETSc 3 */ 4 #ifndef __PETSCMAT_H 5 #define __PETSCMAT_H 6 #include "petscvec.h" 7 PETSC_EXTERN_CXX_BEGIN 8 9 /*S 10 Mat - Abstract PETSc matrix object 11 12 Level: beginner 13 14 Concepts: matrix; linear operator 15 16 .seealso: MatCreate(), MatType, MatSetType() 17 S*/ 18 typedef struct _p_Mat* Mat; 19 20 /*E 21 MatType - String with the name of a PETSc matrix or the creation function 22 with an optional dynamic library name, for example 23 http://www.mcs.anl.gov/petsc/lib.a:mymatcreate() 24 25 Level: beginner 26 27 .seealso: MatSetType(), Mat, MatSolverPackage 28 E*/ 29 #define MatType char* 30 #define MATSAME "same" 31 #define MATMAIJ "maij" 32 #define MATSEQMAIJ "seqmaij" 33 #define MATMPIMAIJ "mpimaij" 34 #define MATIS "is" 35 #define MATAIJ "aij" 36 #define MATSEQAIJ "seqaij" 37 #define MATMPIAIJ "mpiaij" 38 #define MATAIJCRL "aijcrl" 39 #define MATSEQAIJCRL "seqaijcrl" 40 #define MATMPIAIJCRL "mpiaijcrl" 41 #define MATAIJCUSP "aijcusp" 42 #define MATSEQAIJCUSP "seqaijcusp" 43 #define MATMPIAIJCUSP "mpiaijcusp" 44 #define MATAIJPERM "aijperm" 45 #define MATSEQAIJPERM "seqaijperm" 46 #define MATMPIAIJPERM "mpiaijperm" 47 #define MATSHELL "shell" 48 #define MATDENSE "dense" 49 #define MATSEQDENSE "seqdense" 50 #define MATMPIDENSE "mpidense" 51 #define MATBAIJ "baij" 52 #define MATSEQBAIJ "seqbaij" 53 #define MATMPIBAIJ "mpibaij" 54 #define MATMPIADJ "mpiadj" 55 #define MATSBAIJ "sbaij" 56 #define MATSEQSBAIJ "seqsbaij" 57 #define MATMPISBAIJ "mpisbaij" 58 59 #define MATSEQBSTRM "seqbstrm" 60 #define MATMPIBSTRM "mpibstrm" 61 #define MATBSTRM "bstrm" 62 #define MATSEQSBSTRM "seqsbstrm" 63 #define MATMPISBSTRM "mpisbstrm" 64 #define MATSBSTRM "sbstrm" 65 66 #define MATDAAD "daad" 67 #define MATMFFD "mffd" 68 #define MATNORMAL "normal" 69 #define MATLRC "lrc" 70 #define MATSCATTER "scatter" 71 #define MATBLOCKMAT "blockmat" 72 #define MATCOMPOSITE "composite" 73 #define MATFFT "fft" 74 #define MATFFTW "fftw" 75 
#define MATSEQCUFFT "seqcufft" 76 #define MATTRANSPOSEMAT "transpose" 77 #define MATSCHURCOMPLEMENT "schurcomplement" 78 #define MATPYTHON "python" 79 #define MATHYPRESTRUCT "hyprestruct" 80 #define MATHYPRESSTRUCT "hypresstruct" 81 #define MATSUBMATRIX "submatrix" 82 #define MATLOCALREF "localref" 83 #define MATNEST "nest" 84 85 /*E 86 MatSolverPackage - String with the name of a PETSc matrix solver type. 87 88 For example: "petsc" indicates what PETSc provides, "superlu" indicates either 89 SuperLU or SuperLU_Dist etc. 90 91 92 Level: beginner 93 94 .seealso: MatGetFactor(), Mat, MatSetType(), MatType 95 E*/ 96 #define MatSolverPackage char* 97 #define MATSOLVERSPOOLES "spooles" 98 #define MATSOLVERSUPERLU "superlu" 99 #define MATSOLVERSUPERLU_DIST "superlu_dist" 100 #define MATSOLVERUMFPACK "umfpack" 101 #define MATSOLVERCHOLMOD "cholmod" 102 #define MATSOLVERESSL "essl" 103 #define MATSOLVERLUSOL "lusol" 104 #define MATSOLVERMUMPS "mumps" 105 #define MATSOLVERPASTIX "pastix" 106 #define MATSOLVERDSCPACK "dscpack" 107 #define MATSOLVERMATLAB "matlab" 108 #define MATSOLVERPETSC "petsc" 109 #define MATSOLVERPLAPACK "plapack" 110 #define MATSOLVERBAS "bas" 111 112 #define MATSOLVERBSTRM "bstrm" 113 #define MATSOLVERSBSTRM "sbstrm" 114 115 /*E 116 MatFactorType - indicates what type of factorization is requested 117 118 Level: beginner 119 120 Any additions/changes here MUST also be made in include/finclude/petscmat.h 121 122 .seealso: MatSolverPackage, MatGetFactor() 123 E*/ 124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; 125 extern const char *const MatFactorTypes[]; 126 127 extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*); 128 extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *); 129 extern PetscErrorCode MatFactorGetSolverPackage(Mat,const MatSolverPackage*); 130 extern PetscErrorCode 
MatGetFactorType(Mat,MatFactorType*); 131 132 /* Logging support */ 133 #define MAT_FILE_CLASSID 1211216 /* used to indicate matrices in binary files */ 134 extern PetscClassId MAT_CLASSID; 135 extern PetscClassId MAT_FDCOLORING_CLASSID; 136 extern PetscClassId MAT_PARTITIONING_CLASSID; 137 extern PetscClassId MAT_NULLSPACE_CLASSID; 138 extern PetscClassId MATMFFD_CLASSID; 139 140 /*E 141 MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices() 142 or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate 143 that the input matrix is to be replaced with the converted matrix. 144 145 Level: beginner 146 147 Any additions/changes here MUST also be made in include/finclude/petscmat.h 148 149 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert() 150 E*/ 151 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; 152 153 /*E 154 MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices() 155 include the matrix values. Currently it is only used by MatGetSeqNonzerostructure(). 
156 157 Level: beginner 158 159 .seealso: MatGetSeqNonzerostructure() 160 E*/ 161 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; 162 163 extern PetscErrorCode MatInitializePackage(const char[]); 164 165 extern PetscErrorCode MatCreate(MPI_Comm,Mat*); 166 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A) 167 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A) 168 extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); 169 extern PetscErrorCode MatSetType(Mat,const MatType); 170 extern PetscErrorCode MatSetFromOptions(Mat); 171 extern PetscErrorCode MatSetUpPreallocation(Mat); 172 extern PetscErrorCode MatRegisterAll(const char[]); 173 extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat)); 174 extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]); 175 extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); 176 extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]); 177 extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]); 178 179 /*MC 180 MatRegisterDynamic - Adds a new matrix type 181 182 Synopsis: 183 PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat)) 184 185 Not Collective 186 187 Input Parameters: 188 + name - name of a new user-defined matrix type 189 . path - path (either absolute or relative) the library containing this solver 190 . name_create - name of routine to create method context 191 - routine_create - routine to create method context 192 193 Notes: 194 MatRegisterDynamic() may be called multiple times to add several user-defined solvers. 195 196 If dynamic libraries are used, then the fourth input argument (routine_create) 197 is ignored. 
198 199 Sample usage: 200 .vb 201 MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a, 202 "MyMatCreate",MyMatCreate); 203 .ve 204 205 Then, your solver can be chosen with the procedural interface via 206 $ MatSetType(Mat,"my_mat") 207 or at runtime via the option 208 $ -mat_type my_mat 209 210 Level: advanced 211 212 Notes: ${PETSC_ARCH} occuring in pathname will be replaced with appropriate values. 213 If your function is not being put into a shared library then use VecRegister() instead 214 215 .keywords: Mat, register 216 217 .seealso: MatRegisterAll(), MatRegisterDestroy() 218 219 M*/ 220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) 222 #else 223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) 224 #endif 225 226 extern PetscBool MatRegisterAllCalled; 227 extern PetscFList MatList; 228 extern PetscFList MatColoringList; 229 extern PetscFList MatPartitioningList; 230 231 /*E 232 MatStructure - Indicates if the matrix has the same nonzero structure 233 234 Level: beginner 235 236 Any additions/changes here MUST also be made in include/finclude/petscmat.h 237 238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators() 239 E*/ 240 typedef enum {SAME_NONZERO_PATTERN,DIFFERENT_NONZERO_PATTERN,SAME_PRECONDITIONER,SUBSET_NONZERO_PATTERN} MatStructure; 241 242 extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*); 243 extern PetscErrorCode MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*); 244 extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A) 246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A) 247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt 
n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A) 248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A) 249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A)) 250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A)) 251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A)) 252 extern PetscErrorCode MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A)) 259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 260 
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A)) 266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 267 extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 268 extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*); 269 270 extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt 
nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 278 extern PetscErrorCode MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt 
m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A)) 285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A)) 292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 293 extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*); 294 295 extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*); 296 extern PetscErrorCode 
MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 304 305 extern PetscErrorCode MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt 
M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A))
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A)
PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A)
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A))
PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat
*A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 320 extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 321 extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 322 323 extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*); 324 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A) 325 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A) 326 extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*); 327 extern PetscErrorCode MatCreateNormal(Mat,Mat*); 328 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A) 329 extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*); 330 extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*); 331 extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 332 extern PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 333 334 extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 335 extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 336 extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 337 extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 338 339 extern PetscErrorCode 
MatCreateScatter(MPI_Comm,VecScatter,Mat*); 340 extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter); 341 extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*); 342 extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*); 343 extern PetscErrorCode MatCompositeAddMat(Mat,Mat); 344 extern PetscErrorCode MatCompositeMerge(Mat); 345 extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*); 346 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; 347 extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType); 348 349 extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*); 350 extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*); 351 352 extern PetscErrorCode MatCreateTranspose(Mat,Mat*); 353 extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*); 354 extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS); 355 extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*); 356 357 extern PetscErrorCode MatPythonSetType(Mat,const char[]); 358 359 extern PetscErrorCode MatSetUp(Mat); 360 extern PetscErrorCode MatDestroy(Mat*); 361 362 extern PetscErrorCode MatConjugate(Mat); 363 extern PetscErrorCode MatRealPart(Mat); 364 extern PetscErrorCode MatImaginaryPart(Mat); 365 extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*); 366 extern PetscErrorCode MatGetTrace(Mat,PetscScalar*); 367 368 /* ------------------------------------------------------------*/ 369 extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 370 extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 371 extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]); 372 extern PetscErrorCode MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]); 373 374 /*S 375 MatStencil - Data 
structure (C struct) for storing information about a single row or 376 column of a matrix as index on an associated grid. 377 378 Level: beginner 379 380 Concepts: matrix; linear operator 381 382 .seealso: MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil() 383 S*/ 384 typedef struct { 385 PetscInt k,j,i,c; 386 } MatStencil; 387 388 extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 389 extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 390 extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt); 391 392 extern PetscErrorCode MatSetColoring(Mat,ISColoring); 393 extern PetscErrorCode MatSetValuesAdic(Mat,void*); 394 extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*); 395 396 /*E 397 MatAssemblyType - Indicates if the matrix is now to be used, or if you plan 398 to continue to add values to it 399 400 Level: beginner 401 402 .seealso: MatAssemblyBegin(), MatAssemblyEnd() 403 E*/ 404 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; 405 extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType); 406 extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType); 407 extern PetscErrorCode MatAssembled(Mat,PetscBool *); 408 409 410 411 /*E 412 MatOption - Options that may be set for a matrix and its behavior or storage 413 414 Level: beginner 415 416 Any additions/changes here MUST also be made in include/finclude/petscmat.h 417 418 .seealso: MatSetOption() 419 E*/ 420 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS, 421 MAT_SYMMETRIC, 422 MAT_STRUCTURALLY_SYMMETRIC, 423 MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES, 424 MAT_NEW_NONZERO_LOCATION_ERR, 425 MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE, 426 MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES, 427 MAT_USE_INODES, 428 MAT_HERMITIAN, 429 
MAT_SYMMETRY_ETERNAL, 430 MAT_CHECK_COMPRESSED_ROW, 431 MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR, 432 MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR, 433 MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS, 434 NUM_MAT_OPTIONS} MatOption; 435 extern const char *MatOptions[]; 436 extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool ); 437 extern PetscErrorCode MatGetType(Mat,const MatType*); 438 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t) 439 440 extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); 441 extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 442 extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 443 extern PetscErrorCode MatGetRowUpperTriangular(Mat); 444 extern PetscErrorCode MatRestoreRowUpperTriangular(Mat); 445 extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 446 extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 447 extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt); 448 extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]); 449 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a) 450 extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]); 451 extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *); 452 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a) 453 extern PetscErrorCode MatSetBlockSize(Mat,PetscInt); 454 455 456 extern PetscErrorCode MatMult(Mat,Vec,Vec); 457 extern PetscErrorCode MatMultDiagonalBlock(Mat,Vec,Vec); 458 extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec); 459 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 460 extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec); 461 extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec); 462 extern 
PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *); 463 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t) 464 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t) 465 extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *); 466 extern PetscErrorCode MatMultTransposeAdd(Mat,Vec,Vec,Vec); 467 extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec); 468 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 469 extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec); 470 extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec); 471 extern PetscErrorCode MatMatSolve(Mat,Mat,Mat); 472 473 /*E 474 MatDuplicateOption - Indicates if a duplicated sparse matrix should have 475 its numerical values copied over or just its nonzero structure. 476 477 Level: beginner 478 479 Any additions/changes here MUST also be made in include/finclude/petscmat.h 480 481 $ MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix 482 $ this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you 483 $ have several matrices with the same nonzero pattern. 
484 485 .seealso: MatDuplicate() 486 E*/ 487 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; 488 489 extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*); 490 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a) 491 extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*); 492 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a) 493 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a) 494 495 496 extern PetscErrorCode MatCopy(Mat,Mat,MatStructure); 497 extern PetscErrorCode MatView(Mat,PetscViewer); 498 extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *); 499 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t) 500 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t) 501 extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *); 502 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t) 503 extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *); 504 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t) 505 extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *); 506 extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *); 507 extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *); 508 extern PetscErrorCode MatLoad(Mat, PetscViewer); 509 510 extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 511 extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 512 extern PetscErrorCode MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 513 extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 

/*S
   MatInfo - Context of matrix information, used with MatGetInfo()

   In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE

   Level: intermediate

   Concepts: matrix^nonzero information

.seealso: MatGetInfo(), MatInfoType
S*/
typedef struct {
  PetscLogDouble block_size;                         /* block size */
  PetscLogDouble nz_allocated,nz_used,nz_unneeded;   /* number of nonzeros */
  PetscLogDouble memory;                             /* memory allocated */
  PetscLogDouble assemblies;                         /* number of matrix assemblies called */
  PetscLogDouble mallocs;                            /* number of mallocs during MatSetValues() */
  PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */
  PetscLogDouble factor_mallocs;                     /* number of mallocs during factorization */
} MatInfo;

/*E
   MatInfoType - Indicates if you want information about the local part of the matrix,
     the entire parallel matrix or the maximum over all the local parts.

   Level: beginner

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

.seealso: MatGetInfo(), MatInfo
E*/
typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType;
extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*);
extern PetscErrorCode MatGetDiagonal(Mat,Vec);
extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]);
extern PetscErrorCode MatGetRowSum(Mat,Vec);
extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*);
PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t)
extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*);
extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *);
PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t)
extern
PetscErrorCode MatDiagonalScale(Mat,Vec,Vec);
extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode);

/* Comparison routines (the PetscInt argument is the number of random test vectors) */
extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *);
PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t)
extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *);
extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *);

extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *);
PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n)
extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *);
extern PetscErrorCode MatZeroEntries(Mat);
extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec);

extern PetscErrorCode MatUseScaledForm(Mat,PetscBool );
extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec);
extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec);

/* Sizes and parallel row/column ownership layout */
extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**);
extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**);

extern PetscErrorCode MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode
MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]);
extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*);
extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*);
extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*);

extern PetscErrorCode MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatMerge_SeqsToMPINumeric(Mat,Mat);
extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*);
extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*);
extern PetscErrorCode MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*);
extern PetscErrorCode MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
#if defined (PETSC_USE_CTABLE)
#include "petscctable.h"
extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *);
#else
extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *);
#endif
extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]);

extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt);

/* Matrix-matrix product C = A*B (symbolic/numeric phases available separately) */
extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat);

/* Triple product C = P^T*A*P */
extern PetscErrorCode MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat);

extern PetscErrorCode
MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatMatMultTransposeNumeric(Mat,Mat,Mat);

extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure);
extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure);

extern PetscErrorCode MatScale(Mat,PetscScalar);
extern PetscErrorCode MatShift(Mat,PetscScalar);

/* Local-to-global mappings used by the *Local() variants below */
extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping);
extern PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*);
extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec);
extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec);
extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);
extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode);

extern PetscErrorCode MatStashSetInitialSize(Mat,PetscInt,PetscInt);
extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*);

extern PetscErrorCode MatInterpolate(Mat,Vec,Vec);
extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec);
PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y))
extern PetscErrorCode MatRestrict(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*);
extern
PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,Mat*);
extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*);

/*MC
   MatSetValue - Set a single entry into a matrix.

   Not collective

   Input Parameters:
+  m - the matrix
.  row - the row location of the entry
.  col - the column location of the entry
.  value - the value to insert
-  mode - either INSERT_VALUES or ADD_VALUES

   Notes:
   For efficiency one should use MatSetValues() and set several or many
   values simultaneously if possible.

   Level: beginner

.seealso: MatSetValues(), MatSetValueLocal()
M*/
PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);}

PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);}

PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);}

/*MC
   MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.
nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices)

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

/*MC
   MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator that will share the eventually allocated matrix
.  nrows - the number of LOCAL rows in the matrix
-  ncols - the number of LOCAL columns in the matrix

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \
{ \
  PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \
  _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \
  _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\
  _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows;

/*MC
   MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows,PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the row mapping from local numbering to global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  cmap - the column mapping from local to global numbering
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}

/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows,PetscInt *rows,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not
Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}

/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.
ncols - the number of columns in the matrix
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}

/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.
ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateInitialize(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}

/*MC
   MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
.  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines

   Level: intermediate

   Notes:
   See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
956 957 Do not malloc or free dnz and onz that is handled internally by these routines 958 959 This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines. 960 961 Concepts: preallocation^Matrix 962 963 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 964 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 965 M*/ 966 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);} 967 968 969 /*MC 970 MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per 971 row in a matrix providing the data that one can use to correctly preallocate the matrix. 972 973 Synopsis: 974 PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz) 975 976 Collective on MPI_Comm 977 978 Input Parameters: 979 + dnz - the array that was be passed to the matrix preallocation routines 980 - ozn - the other array passed to the matrix preallocation routines 981 982 983 Level: intermediate 984 985 Notes: 986 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 987 988 Do not malloc or free dnz and onz that is handled internally by these routines 989 990 This is a MACRO not a function because it closes the { started in MatPreallocateInitialize(). 

   Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal()
M*/
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}

/* Routines unique to particular data structures */
extern PetscErrorCode MatShellGetContext(Mat,void **);
PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t)

extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);

#define MAT_SKIP_ALLOCATION -4

extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz))
extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz))

extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const
PetscInt[],PetscInt,const PetscInt[]);
PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz))
extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []);
extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]);
extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]);
extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]);
extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]);
extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void));

extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt);
extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*);

extern PetscErrorCode MatStoreValues(Mat);
extern PetscErrorCode MatRetrieveValues(Mat);

extern PetscErrorCode MatDAADSetCtx(Mat,void*);

extern PetscErrorCode MatFindNonzeroRows(Mat,IS*);
/*
   These routines are not usually accessed directly, rather solving is
   done through the KSP and PC interfaces.
*/

/*E
    MatOrderingType - String with the name of a PETSc matrix ordering or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:orderingcreate()

   Level: beginner

   Cannot use const because the PC objects manipulate the string

.seealso: MatGetOrdering()
E*/
#define MatOrderingType char*
#define MATORDERINGNATURAL   "natural"
#define MATORDERINGND        "nd"
#define MATORDERING1WD       "1wd"
#define MATORDERINGRCM       "rcm"
#define MATORDERINGQMD       "qmd"
#define MATORDERINGROWLENGTH "rowlength"
#define MATORDERINGDSC_ND    "dsc_nd"   /* these three are only for DSCPACK, see its documentation for details */
#define MATORDERINGDSC_MMD   "dsc_mmd"
#define MATORDERINGDSC_MDF   "dsc_mdf"
#define MATORDERINGAMD       "amd"      /* only works if UMFPACK is installed with PETSc */

extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*);
extern PetscErrorCode MatGetOrderingList(PetscFList *list);
extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*));

/*MC
   MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package.

   Synopsis:
   PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering))

   Not Collective

   Input Parameters:
+  sname - name of ordering (for example MATORDERINGND)
.  path - location of library where creation routine is
.  name - name of function that creates the ordering type, a string
-  function - function pointer that creates the ordering

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
                              "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif

extern PetscErrorCode MatOrderingRegisterDestroy(void);
extern PetscErrorCode MatOrderingRegisterAll(const char[]);
extern PetscBool MatOrderingRegisterAllCalled;
extern PetscFList MatOrderingList;

extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);

/*E
    MatFactorShiftType - Numeric Shift.

   Level: beginner

E*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];

/*S
   MatFactorInfo - Data passed into the matrix factorization routines

   In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
$     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

   Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.

   You can use MatFactorInfoInitialize() to set default values.

   Level: developer

.seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
          MatFactorInfoInitialize()

S*/
typedef struct {
  PetscReal diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal usedt;
  PetscReal dt;             /* drop tolerance */
  PetscReal dtcol;          /* tolerance for pivoting */
  PetscReal dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal levels;         /* ICC/ILU(levels) */
  PetscReal pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                               factorization may be faster if do not pivot */
  PetscReal zeropivot;      /* pivot is called zero if less than this */
  PetscReal shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
  PetscReal shiftamount;    /* how large the shift is */
} MatFactorInfo;

extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
extern
PetscErrorCode MatSolve(Mat,Vec,Vec); 1181 extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec); 1182 extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec); 1183 extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec); 1184 extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec); 1185 extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec); 1186 extern PetscErrorCode MatSolves(Mat,Vecs,Vecs); 1187 1188 extern PetscErrorCode MatSetUnfactored(Mat); 1189 1190 /*E 1191 MatSORType - What type of (S)SOR to perform 1192 1193 Level: beginner 1194 1195 May be bitwise ORd together 1196 1197 Any additions/changes here MUST also be made in include/finclude/petscmat.h 1198 1199 MatSORType may be bitwise ORd together, so do not change the numbers 1200 1201 .seealso: MatSOR() 1202 E*/ 1203 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, 1204 SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8, 1205 SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16, 1206 SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; 1207 extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec); 1208 1209 /* 1210 These routines are for efficiently computing Jacobians via finite differences. 
1211 */ 1212 1213 /*E 1214 MatColoringType - String with the name of a PETSc matrix coloring or the creation function 1215 with an optional dynamic library name, for example 1216 http://www.mcs.anl.gov/petsc/lib.a:coloringcreate() 1217 1218 Level: beginner 1219 1220 .seealso: MatGetColoring() 1221 E*/ 1222 #define MatColoringType char* 1223 #define MATCOLORINGNATURAL "natural" 1224 #define MATCOLORINGSL "sl" 1225 #define MATCOLORINGLF "lf" 1226 #define MATCOLORINGID "id" 1227 1228 extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*); 1229 extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *)); 1230 1231 /*MC 1232 MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 1233 matrix package. 1234 1235 Synopsis: 1236 PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring)) 1237 1238 Not Collective 1239 1240 Input Parameters: 1241 + sname - name of Coloring (for example MATCOLORINGSL) 1242 . path - location of library where creation routine is 1243 . name - name of function that creates the Coloring type, a string 1244 - function - function pointer that creates the coloring 1245 1246 Level: developer 1247 1248 If dynamic libraries are used, then the fourth input argument (function) 1249 is ignored. 1250 1251 Sample usage: 1252 .vb 1253 MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a, 1254 "MyColor",MyColor); 1255 .ve 1256 1257 Then, your partitioner can be chosen with the procedural interface via 1258 $ MatColoringSetType(part,"my_color") 1259 or at runtime via the option 1260 $ -mat_coloring_type my_color 1261 1262 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1263 1264 .keywords: matrix, Coloring, register 1265 1266 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll() 1267 M*/ 1268 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1269 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) 1270 #else 1271 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) 1272 #endif 1273 1274 extern PetscBool MatColoringRegisterAllCalled; 1275 1276 extern PetscErrorCode MatColoringRegisterAll(const char[]); 1277 extern PetscErrorCode MatColoringRegisterDestroy(void); 1278 extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*); 1279 1280 /*S 1281 MatFDColoring - Object for computing a sparse Jacobian via finite differences 1282 and coloring 1283 1284 Level: beginner 1285 1286 Concepts: coloring, sparse Jacobian, finite differences 1287 1288 .seealso: MatFDColoringCreate() 1289 S*/ 1290 typedef struct _p_MatFDColoring* MatFDColoring; 1291 1292 extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *); 1293 extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*); 1294 extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer); 1295 extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*); 1296 extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**); 1297 extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal); 1298 extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring); 1299 extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *); 1300 extern PetscErrorCode MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *); 1301 extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec); 1302 extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]); 1303 /* 1304 These routines are for partitioning matrices: currently used only 1305 for adjacency 
matrix, MatCreateMPIAdj(). 1306 */ 1307 1308 /*S 1309 MatPartitioning - Object for managing the partitioning of a matrix or graph 1310 1311 Level: beginner 1312 1313 Concepts: partitioning 1314 1315 .seealso: MatPartitioningCreate(), MatPartitioningType 1316 S*/ 1317 typedef struct _p_MatPartitioning* MatPartitioning; 1318 1319 /*E 1320 MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function 1321 with an optional dynamic library name, for example 1322 http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate() 1323 1324 Level: beginner 1325 1326 .seealso: MatPartitioningCreate(), MatPartitioning 1327 E*/ 1328 #define MatPartitioningType char* 1329 #define MATPARTITIONINGCURRENT "current" 1330 #define MATPARTITIONINGSQUARE "square" 1331 #define MATPARTITIONINGPARMETIS "parmetis" 1332 #define MATPARTITIONINGCHACO "chaco" 1333 #define MATPARTITIONINGJOSTLE "jostle" 1334 #define MATPARTITIONINGPARTY "party" 1335 #define MATPARTITIONINGSCOTCH "scotch" 1336 1337 1338 extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*); 1339 extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType); 1340 extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt); 1341 extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat); 1342 extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]); 1343 extern PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []); 1344 extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*); 1345 extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*); 1346 1347 extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning)); 1348 1349 /*MC 1350 MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 1351 matrix package. 
1352 1353 Synopsis: 1354 PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning)) 1355 1356 Not Collective 1357 1358 Input Parameters: 1359 + sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis 1360 . path - location of library where creation routine is 1361 . name - name of function that creates the partitioning type, a string 1362 - function - function pointer that creates the partitioning type 1363 1364 Level: developer 1365 1366 If dynamic libraries are used, then the fourth input argument (function) 1367 is ignored. 1368 1369 Sample usage: 1370 .vb 1371 MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a, 1372 "MyPartCreate",MyPartCreate); 1373 .ve 1374 1375 Then, your partitioner can be chosen with the procedural interface via 1376 $ MatPartitioningSetType(part,"my_part") 1377 or at runtime via the option 1378 $ -mat_partitioning_type my_part 1379 1380 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1381 1382 .keywords: matrix, partitioning, register 1383 1384 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll() 1385 M*/ 1386 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1387 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) 1388 #else 1389 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) 1390 #endif 1391 1392 extern PetscBool MatPartitioningRegisterAllCalled; 1393 1394 extern PetscErrorCode MatPartitioningRegisterAll(const char[]); 1395 extern PetscErrorCode MatPartitioningRegisterDestroy(void); 1396 1397 extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer); 1398 extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning); 1399 extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*); 1400 1401 extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning); 1402 extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *); 1403 1404 extern PetscErrorCode MatPartitioningJostleSetCoarseLevel(MatPartitioning,PetscReal); 1405 extern PetscErrorCode MatPartitioningJostleSetCoarseSequential(MatPartitioning); 1406 1407 typedef enum {MP_CHACO_MULTILEVEL_KL,MP_CHACO_SPECTRAL,MP_CHACO_LINEAR,MP_CHACO_RANDOM, MP_CHACO_SCATTERED} MPChacoGlobalType; 1408 extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning, MPChacoGlobalType); 1409 typedef enum { MP_CHACO_KERNIGHAN_LIN, MP_CHACO_NONE } MPChacoLocalType; 1410 extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning, MPChacoLocalType); 1411 extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal); 1412 typedef enum { MP_CHACO_LANCZOS, MP_CHACO_RQI_SYMMLQ } MPChacoEigenType; 1413 extern PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType); 1414 extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning, PetscReal); 1415 extern PetscErrorCode 
MatPartitioningChacoSetEigenNumber(MatPartitioning, PetscInt); 1416 1417 #define MP_PARTY_OPT "opt" 1418 #define MP_PARTY_LIN "lin" 1419 #define MP_PARTY_SCA "sca" 1420 #define MP_PARTY_RAN "ran" 1421 #define MP_PARTY_GBF "gbf" 1422 #define MP_PARTY_GCF "gcf" 1423 #define MP_PARTY_BUB "bub" 1424 #define MP_PARTY_DEF "def" 1425 extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning, const char*); 1426 #define MP_PARTY_HELPFUL_SETS "hs" 1427 #define MP_PARTY_KERNIGHAN_LIN "kl" 1428 #define MP_PARTY_NONE "no" 1429 extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning, const char*); 1430 extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal); 1431 extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool ); 1432 extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool ); 1433 1434 typedef enum { MP_SCOTCH_GREEDY, MP_SCOTCH_GPS, MP_SCOTCH_GR_GPS } MPScotchGlobalType; 1435 extern PetscErrorCode MatPartitioningScotchSetArch(MatPartitioning,const char*); 1436 extern PetscErrorCode MatPartitioningScotchSetMultilevel(MatPartitioning); 1437 extern PetscErrorCode MatPartitioningScotchSetGlobal(MatPartitioning,MPScotchGlobalType); 1438 extern PetscErrorCode MatPartitioningScotchSetCoarseLevel(MatPartitioning,PetscReal); 1439 extern PetscErrorCode MatPartitioningScotchSetHostList(MatPartitioning,const char*); 1440 typedef enum { MP_SCOTCH_KERNIGHAN_LIN, MP_SCOTCH_NONE } MPScotchLocalType; 1441 extern PetscErrorCode MatPartitioningScotchSetLocal(MatPartitioning,MPScotchLocalType); 1442 extern PetscErrorCode MatPartitioningScotchSetMapping(MatPartitioning); 1443 extern PetscErrorCode MatPartitioningScotchSetStrategy(MatPartitioning,char*); 1444 1445 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*); 1446 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*); 1447 1448 /* 1449 If you add entries here you must also add them to finclude/petscmat.h 1450 */ 1451 
typedef enum { MATOP_SET_VALUES=0, 1452 MATOP_GET_ROW=1, 1453 MATOP_RESTORE_ROW=2, 1454 MATOP_MULT=3, 1455 MATOP_MULT_ADD=4, 1456 MATOP_MULT_TRANSPOSE=5, 1457 MATOP_MULT_TRANSPOSE_ADD=6, 1458 MATOP_SOLVE=7, 1459 MATOP_SOLVE_ADD=8, 1460 MATOP_SOLVE_TRANSPOSE=9, 1461 MATOP_SOLVE_TRANSPOSE_ADD=10, 1462 MATOP_LUFACTOR=11, 1463 MATOP_CHOLESKYFACTOR=12, 1464 MATOP_SOR=13, 1465 MATOP_TRANSPOSE=14, 1466 MATOP_GETINFO=15, 1467 MATOP_EQUAL=16, 1468 MATOP_GET_DIAGONAL=17, 1469 MATOP_DIAGONAL_SCALE=18, 1470 MATOP_NORM=19, 1471 MATOP_ASSEMBLY_BEGIN=20, 1472 MATOP_ASSEMBLY_END=21, 1473 MATOP_SET_OPTION=22, 1474 MATOP_ZERO_ENTRIES=23, 1475 MATOP_ZERO_ROWS=24, 1476 MATOP_LUFACTOR_SYMBOLIC=25, 1477 MATOP_LUFACTOR_NUMERIC=26, 1478 MATOP_CHOLESKY_FACTOR_SYMBOLIC=27, 1479 MATOP_CHOLESKY_FACTOR_NUMERIC=28, 1480 MATOP_SETUP_PREALLOCATION=29, 1481 MATOP_ILUFACTOR_SYMBOLIC=30, 1482 MATOP_ICCFACTOR_SYMBOLIC=31, 1483 MATOP_GET_ARRAY=32, 1484 MATOP_RESTORE_ARRAY=33, 1485 MATOP_DUPLICATE=34, 1486 MATOP_FORWARD_SOLVE=35, 1487 MATOP_BACKWARD_SOLVE=36, 1488 MATOP_ILUFACTOR=37, 1489 MATOP_ICCFACTOR=38, 1490 MATOP_AXPY=39, 1491 MATOP_GET_SUBMATRICES=40, 1492 MATOP_INCREASE_OVERLAP=41, 1493 MATOP_GET_VALUES=42, 1494 MATOP_COPY=43, 1495 MATOP_GET_ROW_MAX=44, 1496 MATOP_SCALE=45, 1497 MATOP_SHIFT=46, 1498 MATOP_DIAGONAL_SET=47, 1499 MATOP_ILUDT_FACTOR=48, 1500 MATOP_SET_BLOCK_SIZE=49, 1501 MATOP_GET_ROW_IJ=50, 1502 MATOP_RESTORE_ROW_IJ=51, 1503 MATOP_GET_COLUMN_IJ=52, 1504 MATOP_RESTORE_COLUMN_IJ=53, 1505 MATOP_FDCOLORING_CREATE=54, 1506 MATOP_COLORING_PATCH=55, 1507 MATOP_SET_UNFACTORED=56, 1508 MATOP_PERMUTE=57, 1509 MATOP_SET_VALUES_BLOCKED=58, 1510 MATOP_GET_SUBMATRIX=59, 1511 MATOP_DESTROY=60, 1512 MATOP_VIEW=61, 1513 MATOP_CONVERT_FROM=62, 1514 MATOP_USE_SCALED_FORM=63, 1515 MATOP_SCALE_SYSTEM=64, 1516 MATOP_UNSCALE_SYSTEM=65, 1517 MATOP_SET_LOCAL_TO_GLOBAL_MAP=66, 1518 MATOP_SET_VALUES_LOCAL=67, 1519 MATOP_ZERO_ROWS_LOCAL=68, 1520 MATOP_GET_ROW_MAX_ABS=69, 1521 MATOP_GET_ROW_MIN_ABS=70, 1522 
MATOP_CONVERT=71, 1523 MATOP_SET_COLORING=72, 1524 MATOP_SET_VALUES_ADIC=73, 1525 MATOP_SET_VALUES_ADIFOR=74, 1526 MATOP_FD_COLORING_APPLY=75, 1527 MATOP_SET_FROM_OPTIONS=76, 1528 MATOP_MULT_CON=77, 1529 MATOP_MULT_TRANSPOSE_CON=78, 1530 MATOP_PERMUTE_SPARSIFY=79, 1531 MATOP_MULT_MULTIPLE=80, 1532 MATOP_SOLVE_MULTIPLE=81, 1533 MATOP_GET_INERTIA=82, 1534 MATOP_LOAD=83, 1535 MATOP_IS_SYMMETRIC=84, 1536 MATOP_IS_HERMITIAN=85, 1537 MATOP_IS_STRUCTURALLY_SYMMETRIC=86, 1538 MATOP_DUMMY=87, 1539 MATOP_GET_VECS=88, 1540 MATOP_MAT_MULT=89, 1541 MATOP_MAT_MULT_SYMBOLIC=90, 1542 MATOP_MAT_MULT_NUMERIC=91, 1543 MATOP_PTAP=92, 1544 MATOP_PTAP_SYMBOLIC=93, 1545 MATOP_PTAP_NUMERIC=94, 1546 MATOP_MAT_MULTTRANSPOSE=95, 1547 MATOP_MAT_MULTTRANSPOSE_SYM=96, 1548 MATOP_MAT_MULTTRANSPOSE_NUM=97, 1549 MATOP_PTAP_SYMBOLIC_SEQAIJ=98, 1550 MATOP_PTAP_NUMERIC_SEQAIJ=99, 1551 MATOP_PTAP_SYMBOLIC_MPIAIJ=100, 1552 MATOP_PTAP_NUMERIC_MPIAIJ=101, 1553 MATOP_CONJUGATE=102, 1554 MATOP_SET_SIZES=103, 1555 MATOP_SET_VALUES_ROW=104, 1556 MATOP_REAL_PART=105, 1557 MATOP_IMAG_PART=106, 1558 MATOP_GET_ROW_UTRIANGULAR=107, 1559 MATOP_RESTORE_ROW_UTRIANGULAR=108, 1560 MATOP_MATSOLVE=109, 1561 MATOP_GET_REDUNDANTMATRIX=110, 1562 MATOP_GET_ROW_MIN=111, 1563 MATOP_GET_COLUMN_VEC=112, 1564 MATOP_MISSING_DIAGONAL=113, 1565 MATOP_MATGETSEQNONZEROSTRUCTURE=114, 1566 MATOP_CREATE=115, 1567 MATOP_GET_GHOSTS=116, 1568 MATOP_GET_LOCALSUBMATRIX=117, 1569 MATOP_RESTORE_LOCALSUBMATRIX=118, 1570 MATOP_MULT_DIAGONAL_BLOCK=119, 1571 MATOP_HERMITIANTRANSPOSE=120, 1572 MATOP_MULTHERMITIANTRANSPOSE=121, 1573 MATOP_MULTHERMITIANTRANSPOSEADD=122, 1574 MATOP_GETMULTIPROCBLOCK=123, 1575 MATOP_GETCOLUMNNORMS=125, 1576 MATOP_GET_SUBMATRICES_PARALLEL=128 1577 } MatOperation; 1578 extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *); 1579 extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void)); 1580 extern PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void)); 1581 extern 
PetscErrorCode MatShellSetContext(Mat,void*); 1582 1583 /* 1584 Codes for matrices stored on disk. By default they are 1585 stored in a universal format. By changing the format with 1586 PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will 1587 be stored in a way natural for the matrix, for example dense matrices 1588 would be stored as dense. Matrices stored this way may only be 1589 read into matrices of the same type. 1590 */ 1591 #define MATRIX_BINARY_FORMAT_DENSE -1 1592 1593 extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal); 1594 extern PetscErrorCode MatISGetLocalMat(Mat,Mat*); 1595 1596 /*S 1597 MatNullSpace - Object that removes a null space from a vector, i.e. 1598 orthogonalizes the vector to a subsapce 1599 1600 Level: advanced 1601 1602 Concepts: matrix; linear operator, null space 1603 1604 Users manual sections: 1605 . sec_singular 1606 1607 .seealso: MatNullSpaceCreate() 1608 S*/ 1609 typedef struct _p_MatNullSpace* MatNullSpace; 1610 1611 extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*); 1612 extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*); 1613 extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*); 1614 extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*); 1615 extern PetscErrorCode MatNullSpaceAttach(Mat,MatNullSpace); 1616 extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *); 1617 extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer); 1618 1619 extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS); 1620 extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal); 1621 extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *); 1622 extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat); 1623 1624 extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*); 1625 extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*); 1626 extern PetscErrorCode 
MatMAIJGetAIJ(Mat,Mat*); 1627 1628 extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*); 1629 1630 extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec); 1631 1632 extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*); 1633 extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec); 1634 extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*); 1635 extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*)); 1636 extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec)); 1637 extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace); 1638 extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt); 1639 extern PetscErrorCode MatMFFDResetHHistory(Mat); 1640 extern PetscErrorCode MatMFFDSetFunctionError(Mat,PetscReal); 1641 extern PetscErrorCode MatMFFDSetPeriod(Mat,PetscInt); 1642 extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *); 1643 extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]); 1644 extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*); 1645 extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*); 1646 1647 /*S 1648 MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free 1649 Jacobian vector products 1650 1651 Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure 1652 1653 MatMFFD*() methods actually take the Mat as their first argument. 
Not a MatMFFD data structure 1654 1655 Level: developer 1656 1657 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFuction(), MatMFFDSetType(), MatMFFDRegister() 1658 S*/ 1659 typedef struct _p_MatMFFD* MatMFFD; 1660 1661 /*E 1662 MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function 1663 1664 Level: beginner 1665 1666 .seealso: MatMFFDSetType(), MatMFFDRegister() 1667 E*/ 1668 #define MatMFFDType char* 1669 #define MATMFFD_DS "ds" 1670 #define MATMFFD_WP "wp" 1671 1672 extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType); 1673 extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD)); 1674 1675 /*MC 1676 MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry. 1677 1678 Synopsis: 1679 PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD)) 1680 1681 Not Collective 1682 1683 Input Parameters: 1684 + name_solver - name of a new user-defined compute-h module 1685 . path - path (either absolute or relative) the library containing this solver 1686 . name_create - name of routine to create method context 1687 - routine_create - routine to create method context 1688 1689 Level: developer 1690 1691 Notes: 1692 MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers. 1693 1694 If dynamic libraries are used, then the fourth input argument (routine_create) 1695 is ignored. 
1696 1697 Sample usage: 1698 .vb 1699 MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a, 1700 "MyHCreate",MyHCreate); 1701 .ve 1702 1703 Then, your solver can be chosen with the procedural interface via 1704 $ MatMFFDSetType(mfctx,"my_h") 1705 or at runtime via the option 1706 $ -snes_mf_type my_h 1707 1708 .keywords: MatMFFD, register 1709 1710 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy() 1711 M*/ 1712 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1713 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) 1714 #else 1715 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) 1716 #endif 1717 1718 extern PetscErrorCode MatMFFDRegisterAll(const char[]); 1719 extern PetscErrorCode MatMFFDRegisterDestroy(void); 1720 extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal); 1721 extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool ); 1722 1723 1724 extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *); 1725 extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *); 1726 1727 /* 1728 PETSc interface to MUMPS 1729 */ 1730 #ifdef PETSC_HAVE_MUMPS 1731 extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt); 1732 #endif 1733 1734 /* 1735 PETSc interface to SUPERLU 1736 */ 1737 #ifdef PETSC_HAVE_SUPERLU 1738 extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal); 1739 #endif 1740 1741 #if defined(PETSC_HAVE_CUSP) 1742 extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 1743 extern PetscErrorCode MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 1744 #endif 1745 1746 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*); 1747 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*); 1748 extern PetscErrorCode 
MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***); 1749 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*); 1750 extern PetscErrorCode MatNestSetVecType(Mat,const VecType); 1751 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]); 1752 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat); 1753 1754 PETSC_EXTERN_CXX_END 1755 #endif 1756