1 /* 2 Include file for the matrix component of PETSc 3 */ 4 #ifndef __PETSCMAT_H 5 #define __PETSCMAT_H 6 #include "petscvec.h" 7 PETSC_EXTERN_CXX_BEGIN 8 9 /*S 10 Mat - Abstract PETSc matrix object 11 12 Level: beginner 13 14 Concepts: matrix; linear operator 15 16 .seealso: MatCreate(), MatType, MatSetType() 17 S*/ 18 typedef struct _p_Mat* Mat; 19 20 /*J 21 MatType - String with the name of a PETSc matrix or the creation function 22 with an optional dynamic library name, for example 23 http://www.mcs.anl.gov/petsc/lib.a:mymatcreate() 24 25 Level: beginner 26 27 .seealso: MatSetType(), Mat, MatSolverPackage 28 J*/ 29 #define MatType char* 30 #define MATSAME "same" 31 #define MATMAIJ "maij" 32 #define MATSEQMAIJ "seqmaij" 33 #define MATMPIMAIJ "mpimaij" 34 #define MATIS "is" 35 #define MATAIJ "aij" 36 #define MATSEQAIJ "seqaij" 37 #define MATSEQAIJPTHREAD "seqaijpthread" 38 #define MATAIJPTHREAD "aijpthread" 39 #define MATMPIAIJ "mpiaij" 40 #define MATAIJCRL "aijcrl" 41 #define MATSEQAIJCRL "seqaijcrl" 42 #define MATMPIAIJCRL "mpiaijcrl" 43 #define MATAIJCUSP "aijcusp" 44 #define MATSEQAIJCUSP "seqaijcusp" 45 #define MATMPIAIJCUSP "mpiaijcusp" 46 #define MATAIJPERM "aijperm" 47 #define MATSEQAIJPERM "seqaijperm" 48 #define MATMPIAIJPERM "mpiaijperm" 49 #define MATSHELL "shell" 50 #define MATDENSE "dense" 51 #define MATSEQDENSE "seqdense" 52 #define MATMPIDENSE "mpidense" 53 #define MATBAIJ "baij" 54 #define MATSEQBAIJ "seqbaij" 55 #define MATMPIBAIJ "mpibaij" 56 #define MATMPIADJ "mpiadj" 57 #define MATSBAIJ "sbaij" 58 #define MATSEQSBAIJ "seqsbaij" 59 #define MATMPISBAIJ "mpisbaij" 60 #define MATSEQBSTRM "seqbstrm" 61 #define MATMPIBSTRM "mpibstrm" 62 #define MATBSTRM "bstrm" 63 #define MATSEQSBSTRM "seqsbstrm" 64 #define MATMPISBSTRM "mpisbstrm" 65 #define MATSBSTRM "sbstrm" 66 #define MATDAAD "daad" 67 #define MATMFFD "mffd" 68 #define MATNORMAL "normal" 69 #define MATLRC "lrc" 70 #define MATSCATTER "scatter" 71 #define MATBLOCKMAT "blockmat" 72 #define 
MATCOMPOSITE "composite" 73 #define MATFFT "fft" 74 #define MATFFTW "fftw" 75 #define MATSEQCUFFT "seqcufft" 76 #define MATTRANSPOSEMAT "transpose" 77 #define MATSCHURCOMPLEMENT "schurcomplement" 78 #define MATPYTHON "python" 79 #define MATHYPRESTRUCT "hyprestruct" 80 #define MATHYPRESSTRUCT "hypresstruct" 81 #define MATSUBMATRIX "submatrix" 82 #define MATLOCALREF "localref" 83 #define MATNEST "nest" 84 #define MATIJ "ij" 85 86 /*J 87 MatSolverPackage - String with the name of a PETSc matrix solver type. 88 89 For example: "petsc" indicates what PETSc provides, "superlu" indicates either 90 SuperLU or SuperLU_Dist etc. 91 92 93 Level: beginner 94 95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType 96 J*/ 97 #define MatSolverPackage char* 98 #define MATSOLVERSPOOLES "spooles" 99 #define MATSOLVERSUPERLU "superlu" 100 #define MATSOLVERSUPERLU_DIST "superlu_dist" 101 #define MATSOLVERUMFPACK "umfpack" 102 #define MATSOLVERCHOLMOD "cholmod" 103 #define MATSOLVERESSL "essl" 104 #define MATSOLVERLUSOL "lusol" 105 #define MATSOLVERMUMPS "mumps" 106 #define MATSOLVERPASTIX "pastix" 107 #define MATSOLVERMATLAB "matlab" 108 #define MATSOLVERPETSC "petsc" 109 #define MATSOLVERPLAPACK "plapack" 110 #define MATSOLVERBAS "bas" 111 112 #define MATSOLVERBSTRM "bstrm" 113 #define MATSOLVERSBSTRM "sbstrm" 114 115 /*E 116 MatFactorType - indicates what type of factorization is requested 117 118 Level: beginner 119 120 Any additions/changes here MUST also be made in include/finclude/petscmat.h 121 122 .seealso: MatSolverPackage, MatGetFactor() 123 E*/ 124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; 125 extern const char *const MatFactorTypes[]; 126 127 extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*); 128 extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *); 129 extern PetscErrorCode MatFactorGetSolverPackage(Mat,const 
MatSolverPackage*); 130 extern PetscErrorCode MatGetFactorType(Mat,MatFactorType*); 131 132 /* Logging support */ 133 #define MAT_FILE_CLASSID 1211216 /* used to indicate matrices in binary files */ 134 extern PetscClassId MAT_CLASSID; 135 extern PetscClassId MAT_FDCOLORING_CLASSID; 136 extern PetscClassId MAT_PARTITIONING_CLASSID; 137 extern PetscClassId MAT_NULLSPACE_CLASSID; 138 extern PetscClassId MATMFFD_CLASSID; 139 140 /*E 141 MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices() 142 or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate 143 that the input matrix is to be replaced with the converted matrix. 144 145 Level: beginner 146 147 Any additions/changes here MUST also be made in include/finclude/petscmat.h 148 149 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert() 150 E*/ 151 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; 152 153 /*E 154 MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices() 155 include the matrix values. Currently it is only used by MatGetSeqNonzerostructure(). 
156 157 Level: beginner 158 159 .seealso: MatGetSeqNonzerostructure() 160 E*/ 161 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; 162 163 extern PetscErrorCode MatInitializePackage(const char[]); 164 165 extern PetscErrorCode MatCreate(MPI_Comm,Mat*); 166 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A) 167 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A) 168 extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); 169 extern PetscErrorCode MatSetType(Mat,const MatType); 170 extern PetscErrorCode MatSetFromOptions(Mat); 171 extern PetscErrorCode MatSetUpPreallocation(Mat); 172 extern PetscErrorCode MatRegisterAll(const char[]); 173 extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat)); 174 extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]); 175 extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); 176 extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]); 177 extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]); 178 179 /*MC 180 MatRegisterDynamic - Adds a new matrix type 181 182 Synopsis: 183 PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat)) 184 185 Not Collective 186 187 Input Parameters: 188 + name - name of a new user-defined matrix type 189 . path - path (either absolute or relative) the library containing this solver 190 . name_create - name of routine to create method context 191 - routine_create - routine to create method context 192 193 Notes: 194 MatRegisterDynamic() may be called multiple times to add several user-defined solvers. 195 196 If dynamic libraries are used, then the fourth input argument (routine_create) 197 is ignored. 
198 199 Sample usage: 200 .vb 201 MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a, 202 "MyMatCreate",MyMatCreate); 203 .ve 204 205 Then, your solver can be chosen with the procedural interface via 206 $ MatSetType(Mat,"my_mat") 207 or at runtime via the option 208 $ -mat_type my_mat 209 210 Level: advanced 211 212 Notes: ${PETSC_ARCH} occuring in pathname will be replaced with appropriate values. 213 If your function is not being put into a shared library then use VecRegister() instead 214 215 .keywords: Mat, register 216 217 .seealso: MatRegisterAll(), MatRegisterDestroy() 218 219 M*/ 220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) 222 #else 223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) 224 #endif 225 226 extern PetscBool MatRegisterAllCalled; 227 extern PetscFList MatList; 228 extern PetscFList MatColoringList; 229 extern PetscFList MatPartitioningList; 230 231 /*E 232 MatStructure - Indicates if the matrix has the same nonzero structure 233 234 Level: beginner 235 236 Any additions/changes here MUST also be made in include/finclude/petscmat.h 237 238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators() 239 E*/ 240 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure; 241 242 extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*); 243 extern PetscErrorCode MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*); 244 extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A) 246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A) 247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt 
n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A) 248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A) 249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A)) 250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A)) 251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A)) 252 extern PetscErrorCode MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A)) 259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 260 
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A)) 266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 267 extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 268 extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*); 269 270 extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt 
nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 278 extern PetscErrorCode MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt 
m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A)) 285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A)) 292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 293 extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*); 294 295 extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*); 296 extern PetscErrorCode 
MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 304 305 extern PetscErrorCode MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt 
M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A)) 312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A)) 319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat 
*A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 320 extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 321 extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 322 323 extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*); 324 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A) 325 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A) 326 extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*); 327 extern PetscErrorCode MatCreateNormal(Mat,Mat*); 328 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A) 329 extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*); 330 extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*); 331 extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 332 extern PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 333 334 extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 335 extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 336 extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 337 extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 338 339 extern PetscErrorCode 
MatCreateScatter(MPI_Comm,VecScatter,Mat*); 340 extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter); 341 extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*); 342 extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*); 343 extern PetscErrorCode MatCompositeAddMat(Mat,Mat); 344 extern PetscErrorCode MatCompositeMerge(Mat); 345 extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*); 346 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; 347 extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType); 348 349 extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*); 350 extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*); 351 352 extern PetscErrorCode MatCreateTranspose(Mat,Mat*); 353 extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*); 354 extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS); 355 extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*); 356 357 extern PetscErrorCode MatPythonSetType(Mat,const char[]); 358 359 extern PetscErrorCode MatSetUp(Mat); 360 extern PetscErrorCode MatDestroy(Mat*); 361 362 extern PetscErrorCode MatConjugate(Mat); 363 extern PetscErrorCode MatRealPart(Mat); 364 extern PetscErrorCode MatImaginaryPart(Mat); 365 extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*); 366 extern PetscErrorCode MatGetTrace(Mat,PetscScalar*); 367 extern PetscErrorCode MatInvertBlockDiagonal(Mat,PetscScalar **); 368 369 /* ------------------------------------------------------------*/ 370 extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 371 extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 372 extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]); 373 extern PetscErrorCode 
MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]); 374 extern PetscErrorCode MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]); 375 376 /*S 377 MatStencil - Data structure (C struct) for storing information about a single row or 378 column of a matrix as indexed on an associated grid. 379 380 Fortran usage is different, see MatSetValuesStencil() for details. 381 382 Level: beginner 383 384 Concepts: matrix; linear operator 385 386 .seealso: MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil() 387 S*/ 388 typedef struct { 389 PetscInt k,j,i,c; 390 } MatStencil; 391 392 extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 393 extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 394 extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt); 395 396 extern PetscErrorCode MatSetColoring(Mat,ISColoring); 397 extern PetscErrorCode MatSetValuesAdic(Mat,void*); 398 extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*); 399 400 /*E 401 MatAssemblyType - Indicates if the matrix is now to be used, or if you plan 402 to continue to add values to it 403 404 Level: beginner 405 406 .seealso: MatAssemblyBegin(), MatAssemblyEnd() 407 E*/ 408 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; 409 extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType); 410 extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType); 411 extern PetscErrorCode MatAssembled(Mat,PetscBool *); 412 413 414 415 /*E 416 MatOption - Options that may be set for a matrix and its behavior or storage 417 418 Level: beginner 419 420 Any additions/changes here MUST also be made in include/finclude/petscmat.h 421 422 .seealso: MatSetOption() 423 E*/ 424 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS, 425 MAT_SYMMETRIC, 426 
MAT_STRUCTURALLY_SYMMETRIC, 427 MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES, 428 MAT_NEW_NONZERO_LOCATION_ERR, 429 MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE, 430 MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES, 431 MAT_USE_INODES, 432 MAT_HERMITIAN, 433 MAT_SYMMETRY_ETERNAL, 434 MAT_CHECK_COMPRESSED_ROW, 435 MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR, 436 MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR, 437 MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS, 438 NUM_MAT_OPTIONS} MatOption; 439 extern const char *MatOptions[]; 440 extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool ); 441 extern PetscErrorCode MatGetType(Mat,const MatType*); 442 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t) 443 444 extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); 445 extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 446 extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 447 extern PetscErrorCode MatGetRowUpperTriangular(Mat); 448 extern PetscErrorCode MatRestoreRowUpperTriangular(Mat); 449 extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 450 extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 451 extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt); 452 extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]); 453 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a) 454 extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]); 455 extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *); 456 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a) 457 extern PetscErrorCode MatSetBlockSize(Mat,PetscInt); 458 459 460 extern PetscErrorCode MatMult(Mat,Vec,Vec); 461 extern PetscErrorCode 
MatMultDiagonalBlock(Mat,Vec,Vec); 462 extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec); 463 PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 464 extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec); 465 extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec); 466 extern PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *); 467 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t) 468 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t) 469 extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *); 470 extern PetscErrorCode MatMultTransposeAdd(Mat,Vec,Vec,Vec); 471 extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec); 472 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 473 extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec); 474 extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec); 475 extern PetscErrorCode MatMatSolve(Mat,Mat,Mat); 476 477 /*E 478 MatDuplicateOption - Indicates if a duplicated sparse matrix should have 479 its numerical values copied over or just its nonzero structure. 480 481 Level: beginner 482 483 Any additions/changes here MUST also be made in include/finclude/petscmat.h 484 485 $ MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix 486 $ this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you 487 $ have several matrices with the same nonzero pattern. 
488 489 .seealso: MatDuplicate() 490 E*/ 491 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; 492 493 extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*); 494 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a) 495 extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*); 496 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a) 497 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a) 498 499 500 extern PetscErrorCode MatCopy(Mat,Mat,MatStructure); 501 extern PetscErrorCode MatView(Mat,PetscViewer); 502 extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *); 503 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t) 504 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t) 505 extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *); 506 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t) 507 extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *); 508 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t) 509 extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *); 510 extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *); 511 extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *); 512 extern PetscErrorCode MatLoad(Mat, PetscViewer); 513 514 extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 515 extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 516 extern PetscErrorCode MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 517 extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 
518 519 /*S 520 MatInfo - Context of matrix information, used with MatGetInfo() 521 522 In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE 523 524 Level: intermediate 525 526 Concepts: matrix^nonzero information 527 528 .seealso: MatGetInfo(), MatInfoType 529 S*/ 530 typedef struct { 531 PetscLogDouble block_size; /* block size */ 532 PetscLogDouble nz_allocated,nz_used,nz_unneeded; /* number of nonzeros */ 533 PetscLogDouble memory; /* memory allocated */ 534 PetscLogDouble assemblies; /* number of matrix assemblies called */ 535 PetscLogDouble mallocs; /* number of mallocs during MatSetValues() */ 536 PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */ 537 PetscLogDouble factor_mallocs; /* number of mallocs during factorization */ 538 } MatInfo; 539 540 /*E 541 MatInfoType - Indicates if you want information about the local part of the matrix, 542 the entire parallel matrix or the maximum over all the local parts. 543 544 Level: beginner 545 546 Any additions/changes here MUST also be made in include/finclude/petscmat.h 547 548 .seealso: MatGetInfo(), MatInfo 549 E*/ 550 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; 551 extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*); 552 extern PetscErrorCode MatGetDiagonal(Mat,Vec); 553 extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]); 554 extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]); 555 extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]); 556 extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]); 557 extern PetscErrorCode MatGetRowSum(Mat,Vec); 558 extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*); 559 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t) 560 extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*); 561 extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *); 562 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t) 563 extern 
PetscErrorCode MatDiagonalScale(Mat,Vec,Vec); 564 extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode); 565 extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *); 566 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t) 567 extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *); 568 extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *); 569 extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *); 570 extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *); 571 572 extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *); 573 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n) 574 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *); 575 extern PetscErrorCode MatZeroEntries(Mat); 576 extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 577 extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec); 578 extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec); 579 extern PetscErrorCode MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec); 580 extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 581 extern PetscErrorCode MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec); 582 583 extern PetscErrorCode MatUseScaledForm(Mat,PetscBool ); 584 extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec); 585 extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec); 586 587 extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*); 588 extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*); 589 extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*); 590 extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**); 591 extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*); 592 extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**); 593 594 extern PetscErrorCode 
MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 595 extern PetscErrorCode MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 596 extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]); 597 extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *); 598 extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*); 599 extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*); 600 extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*); 601 extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*); 602 603 extern PetscErrorCode MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*); 604 extern PetscErrorCode MatMergeSymbolic(MPI_Comm,Mat,PetscInt,Mat*); 605 extern PetscErrorCode MatMergeNumeric(MPI_Comm,Mat,PetscInt,Mat); 606 extern PetscErrorCode MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*); 607 extern PetscErrorCode MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*); 608 extern PetscErrorCode MatMerge_SeqsToMPINumeric(Mat,Mat); 609 extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*); 610 extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*); 611 extern PetscErrorCode MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*); 612 extern PetscErrorCode MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*); 613 #if defined (PETSC_USE_CTABLE) 614 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *); 615 #else 616 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *); 617 #endif 618 extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]); 619 620 extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt); 621 622 extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 623 extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 624 extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat); 625 626 extern PetscErrorCode 
MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*); 627 extern PetscErrorCode MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*); 628 extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat); 629 630 extern PetscErrorCode MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*); 631 extern PetscErrorCode MatMatTransposetMultSymbolic(Mat,Mat,PetscReal,Mat*); 632 extern PetscErrorCode MatMatTransposetMultNumeric(Mat,Mat,Mat); 633 extern PetscErrorCode MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*); 634 extern PetscErrorCode MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*); 635 extern PetscErrorCode MatMatMultTransposeNumeric(Mat,Mat,Mat); 636 637 extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure); 638 extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure); 639 640 extern PetscErrorCode MatScale(Mat,PetscScalar); 641 extern PetscErrorCode MatShift(Mat,PetscScalar); 642 643 extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 644 extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 645 extern PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 646 extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 647 extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 648 extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 649 extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 650 extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 651 extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 652 extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 653 654 extern PetscErrorCode 
MatStashSetInitialSize(Mat,PetscInt,PetscInt); 655 extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*); 656 657 extern PetscErrorCode MatInterpolate(Mat,Vec,Vec); 658 extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec); 659 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 660 extern PetscErrorCode MatRestrict(Mat,Vec,Vec); 661 extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*); 662 extern PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*); 663 extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,Mat*); 664 extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*); 665 666 /*MC 667 MatSetValue - Set a single entry into a matrix. 668 669 Not collective 670 671 Input Parameters: 672 + m - the matrix 673 . row - the row location of the entry 674 . col - the column location of the entry 675 . value - the value to insert 676 - mode - either INSERT_VALUES or ADD_VALUES 677 678 Notes: 679 For efficiency one should use MatSetValues() and set several or many 680 values simultaneously if possible. 681 682 Level: beginner 683 684 .seealso: MatSetValues(), MatSetValueLocal() 685 M*/ 686 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);} 687 688 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);} 689 690 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);} 691 692 /*MC 693 MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per 694 row in a matrix providing the data that one can use to correctly preallocate the matrix. 
695 696 Synopsis: 697 PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 698 699 Collective on MPI_Comm 700 701 Input Parameters: 702 + comm - the communicator that will share the eventually allocated matrix 703 . nrows - the number of LOCAL rows in the matrix 704 - ncols - the number of LOCAL columns in the matrix 705 706 Output Parameters: 707 + dnz - the array that will be passed to the matrix preallocation routines 708 - onz - the other array passed to the matrix preallocation routines 709 710 711 Level: intermediate 712 713 Notes: 714 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 715 716 Do not malloc or free dnz and onz, that is handled internally by these routines 717 718 Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices) 719 720 This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize(). 
721 722 Concepts: preallocation^Matrix 723 724 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 725 MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal() 726 M*/ 727 #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \ 728 { \ 729 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \ 730 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 731 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 732 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 733 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\ 734 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 735 736 /*MC 737 MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per 738 row in a matrix providing the data that one can use to correctly preallocate the matrix. 739 740 Synopsis: 741 PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 742 743 Collective on MPI_Comm 744 745 Input Parameters: 746 + comm - the communicator that will share the eventually allocated matrix 747 . nrows - the number of LOCAL rows in the matrix 748 - ncols - the number of LOCAL columns in the matrix 749 750 Output Parameters: 751 + dnz - the array that will be passed to the matrix preallocation routines 752 - ozn - the other array passed to the matrix preallocation routines 753 754 755 Level: intermediate 756 757 Notes: 758 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
759 760 Do not malloc or free dnz and onz, that is handled internally by these routines 761 762 This is a MACRO not a function because it has a leading { that is closed by PetscPreallocateFinalize(). 763 764 Concepts: preallocation^Matrix 765 766 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 767 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 768 M*/ 769 #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \ 770 { \ 771 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \ 772 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 773 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 774 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 775 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\ 776 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 777 778 /*MC 779 MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 780 inserted using a local number of the rows and columns 781 782 Synopsis: 783 PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 784 785 Not Collective 786 787 Input Parameters: 788 + map - the row mapping from local numbering to global numbering 789 . nrows - the number of rows indicated 790 . rows - the indices of the rows 791 . cmap - the column mapping from local to global numbering 792 . ncols - the number of columns in the matrix 793 . cols - the columns indicated 794 . 
dnz - the array that will be passed to the matrix preallocation routines 795 - ozn - the other array passed to the matrix preallocation routines 796 797 798 Level: intermediate 799 800 Notes: 801 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 802 803 Do not malloc or free dnz and onz, that is handled internally by these routines 804 805 Concepts: preallocation^Matrix 806 807 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 808 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 809 M*/ 810 #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \ 811 {\ 812 PetscInt __l;\ 813 _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\ 814 _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\ 815 for (__l=0;__l<nrows;__l++) {\ 816 _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 817 }\ 818 } 819 820 /*MC 821 MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 822 inserted using a local number of the rows and columns 823 824 Synopsis: 825 PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMappping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 826 827 Not Collective 828 829 Input Parameters: 830 + map - the mapping between local numbering and global numbering 831 . nrows - the number of rows indicated 832 . rows - the indices of the rows 833 . ncols - the number of columns in the matrix 834 . cols - the columns indicated 835 . 
dnz - the array that will be passed to the matrix preallocation routines 836 - ozn - the other array passed to the matrix preallocation routines 837 838 839 Level: intermediate 840 841 Notes: 842 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 843 844 Do not malloc or free dnz and onz that is handled internally by these routines 845 846 Concepts: preallocation^Matrix 847 848 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 849 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 850 M*/ 851 #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\ 852 {\ 853 PetscInt __l;\ 854 _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\ 855 _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\ 856 for (__l=0;__l<nrows;__l++) {\ 857 _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 858 }\ 859 } 860 861 /*MC 862 MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 863 inserted using a local number of the rows and columns 864 865 Synopsis: 866 PetscErrorCode MatPreallocateSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 867 868 Not Collective 869 870 Input Parameters: 871 + row - the row 872 . ncols - the number of columns in the matrix 873 - cols - the columns indicated 874 875 Output Parameters: 876 + dnz - the array that will be passed to the matrix preallocation routines 877 - ozn - the other array passed to the matrix preallocation routines 878 879 880 Level: intermediate 881 882 Notes: 883 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
884 885 Do not malloc or free dnz and onz that is handled internally by these routines 886 887 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 888 889 Concepts: preallocation^Matrix 890 891 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 892 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 893 M*/ 894 #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\ 895 { PetscInt __i; \ 896 if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\ 897 if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\ 898 for (__i=0; __i<nc; __i++) {\ 899 if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \ 900 else dnz[row - __rstart]++;\ 901 }\ 902 } 903 904 /*MC 905 MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 906 inserted using a local number of the rows and columns 907 908 Synopsis: 909 PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 910 911 Not Collective 912 913 Input Parameters: 914 + nrows - the number of rows indicated 915 . rows - the indices of the rows 916 . ncols - the number of columns in the matrix 917 . cols - the columns indicated 918 . dnz - the array that will be passed to the matrix preallocation routines 919 - ozn - the other array passed to the matrix preallocation routines 920 921 922 Level: intermediate 923 924 Notes: 925 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 
926 927 Do not malloc or free dnz and onz that is handled internally by these routines 928 929 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 930 931 Concepts: preallocation^Matrix 932 933 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 934 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 935 M*/ 936 #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\ 937 { PetscInt __i; \ 938 for (__i=0; __i<nc; __i++) {\ 939 if (cols[__i] >= __end) onz[row - __rstart]++; \ 940 else if (cols[__i] >= row) dnz[row - __rstart]++;\ 941 }\ 942 } 943 944 /*MC 945 MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists 946 947 Synopsis: 948 PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 949 950 Not Collective 951 952 Input Parameters: 953 . A - matrix 954 . row - row where values exist (must be local to this process) 955 . ncols - number of columns 956 . cols - columns with nonzeros 957 . dnz - the array that will be passed to the matrix preallocation routines 958 - onz - the other array passed to the matrix preallocation routines 959 960 961 Level: intermediate 962 963 Notes: 964 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 965 966 Do not malloc or free dnz and onz that is handled internally by these routines 967 968 This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines. 
969 970 Concepts: preallocation^Matrix 971 972 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 973 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 974 M*/ 975 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);} 976 977 978 /*MC 979 MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per 980 row in a matrix providing the data that one can use to correctly preallocate the matrix. 981 982 Synopsis: 983 PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz) 984 985 Collective on MPI_Comm 986 987 Input Parameters: 988 + dnz - the array that was be passed to the matrix preallocation routines 989 - ozn - the other array passed to the matrix preallocation routines 990 991 992 Level: intermediate 993 994 Notes: 995 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvment</A> chapter in the users manual for more details. 996 997 Do not malloc or free dnz and onz that is handled internally by these routines 998 999 This is a MACRO not a function because it closes the { started in MatPreallocateInitialize(). 
1000 1001 Concepts: preallocation^Matrix 1002 1003 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 1004 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 1005 M*/ 1006 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);} 1007 1008 1009 1010 /* Routines unique to particular data structures */ 1011 extern PetscErrorCode MatShellGetContext(Mat,void *); 1012 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t) 1013 1014 extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*); 1015 extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *); 1016 1017 extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]); 1018 extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]); 1019 extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1020 extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1021 extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1022 extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool); 1023 1024 #define MAT_SKIP_ALLOCATION -4 1025 1026 extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1027 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1028 extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1029 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1030 extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]); 1031 
PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat A,const PetscInt nnz[]),(A,0,nnz)) 1032 1033 extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1034 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz)) 1035 extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1036 extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1037 extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []); 1038 extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1039 extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 1040 extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1041 extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]); 1042 extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]); 1043 extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]); 1044 extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1045 extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1046 extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void)); 1047 1048 extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt); 1049 extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*); 1050 1051 extern PetscErrorCode MatStoreValues(Mat); 1052 extern PetscErrorCode MatRetrieveValues(Mat); 1053 1054 extern PetscErrorCode MatDAADSetCtx(Mat,void*); 1055 1056 extern PetscErrorCode MatFindNonzeroRows(Mat,IS*); 1057 /* 1058 These routines are not usually accessed directly, rather solving is 1059 done through the KSP 
and PC interfaces. 1060 */ 1061 1062 /*J 1063 MatOrderingType - String with the name of a PETSc matrix ordering or the creation function 1064 with an optional dynamic library name, for example 1065 http://www.mcs.anl.gov/petsc/lib.a:orderingcreate() 1066 1067 Level: beginner 1068 1069 Cannot use const because the PC objects manipulate the string 1070 1071 .seealso: MatGetOrdering() 1072 J*/ 1073 #define MatOrderingType char* 1074 #define MATORDERINGNATURAL "natural" 1075 #define MATORDERINGND "nd" 1076 #define MATORDERING1WD "1wd" 1077 #define MATORDERINGRCM "rcm" 1078 #define MATORDERINGQMD "qmd" 1079 #define MATORDERINGROWLENGTH "rowlength" 1080 #define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ 1081 1082 extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*); 1083 extern PetscErrorCode MatGetOrderingList(PetscFList *list); 1084 extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*)); 1085 1086 /*MC 1087 MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 1088 1089 Synopsis: 1090 PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering)) 1091 1092 Not Collective 1093 1094 Input Parameters: 1095 + sname - name of ordering (for example MATORDERINGND) 1096 . path - location of library where creation routine is 1097 . name - name of function that creates the ordering type,a string 1098 - function - function pointer that creates the ordering 1099 1100 Level: developer 1101 1102 If dynamic libraries are used, then the fourth input argument (function) 1103 is ignored. 
1104 1105 Sample usage: 1106 .vb 1107 MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a, 1108 "MyOrder",MyOrder); 1109 .ve 1110 1111 Then, your partitioner can be chosen with the procedural interface via 1112 $ MatOrderingSetType(part,"my_order") 1113 or at runtime via the option 1114 $ -pc_factor_mat_ordering_type my_order 1115 1116 ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values. 1117 1118 .keywords: matrix, ordering, register 1119 1120 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll() 1121 M*/ 1122 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1123 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) 1124 #else 1125 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d) 1126 #endif 1127 1128 extern PetscErrorCode MatOrderingRegisterDestroy(void); 1129 extern PetscErrorCode MatOrderingRegisterAll(const char[]); 1130 extern PetscBool MatOrderingRegisterAllCalled; 1131 extern PetscFList MatOrderingList; 1132 1133 extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS); 1134 1135 /*S 1136 MatFactorShiftType - Numeric Shift. 1137 1138 Level: beginner 1139 1140 S*/ 1141 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; 1142 extern const char *MatFactorShiftTypes[]; 1143 1144 /*S 1145 MatFactorInfo - Data passed into the matrix factorization routines 1146 1147 In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use 1148 $ MatFactorInfo info(MAT_FACTORINFO_SIZE) 1149 1150 Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC. 1151 1152 You can use MatFactorInfoInitialize() to set default values. 
1153 1154 Level: developer 1155 1156 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 1157 MatFactorInfoInitialize() 1158 1159 S*/ 1160 typedef struct { 1161 PetscReal diagonal_fill; /* force diagonal to fill in if initially not filled */ 1162 PetscReal usedt; 1163 PetscReal dt; /* drop tolerance */ 1164 PetscReal dtcol; /* tolerance for pivoting */ 1165 PetscReal dtcount; /* maximum nonzeros to be allowed per row */ 1166 PetscReal fill; /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */ 1167 PetscReal levels; /* ICC/ILU(levels) */ 1168 PetscReal pivotinblocks; /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0 1169 factorization may be faster if do not pivot */ 1170 PetscReal zeropivot; /* pivot is called zero if less than this */ 1171 PetscReal shifttype; /* type of shift added to matrix factor to prevent zero pivots */ 1172 PetscReal shiftamount; /* how large the shift is */ 1173 } MatFactorInfo; 1174 1175 extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*); 1176 extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*); 1177 extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1178 extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*); 1179 extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*); 1180 extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*); 1181 extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1182 extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1183 extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1184 extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*); 1185 extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*); 1186 extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*); 1187 extern 
PetscErrorCode MatSolve(Mat,Vec,Vec);

/* Triangular solves with an already-factored matrix. */
extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec);
extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec);
extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec);
extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec);
extern PetscErrorCode MatSolves(Mat,Vecs,Vecs);

extern PetscErrorCode MatSetUnfactored(Mat);

/*E
    MatSORType - What type of (S)SOR to perform

    Level: beginner

   May be bitwise ORd together

   Any additions/changes here MUST also be made in include/finclude/petscmat.h

   MatSORType may be bitwise ORd together, so do not change the numbers

.seealso: MatSOR()
E*/
typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3,
              SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8,
              SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16,
              SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType;
extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec);

/*
   These routines are for efficiently computing Jacobians via finite differences.
*/

/*J
    MatColoringType - String with the name of a PETSc matrix coloring or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coloringcreate()

   Level: beginner

.seealso: MatGetColoring()
J*/
#define MatColoringType char*
#define MATCOLORINGNATURAL "natural"
#define MATCOLORINGSL      "sl"
#define MATCOLORINGLF      "lf"
#define MATCOLORINGID      "id"

extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*);
extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *));

/*MC
   MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the
                               matrix package.

   Synopsis:
   PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring))

   Not Collective

   Input Parameters:
+  sname - name of Coloring (for example MATCOLORINGSL)
.  path - location of library where creation routine is
.  name - name of function that creates the Coloring type, a string
-  function - function pointer that creates the coloring

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyColor",MyColor);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatColoringSetType(part,"my_color")
   or at runtime via the option
$     -mat_coloring_type my_color

   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

.keywords: matrix, Coloring, register

.seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll()
M*/
/* With dynamic libraries the function pointer (d) is dropped; the creation routine
   is located at runtime by name (c) in the library at path (b) instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0)
#else
#define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d)
#endif

extern PetscBool MatColoringRegisterAllCalled;

extern PetscErrorCode MatColoringRegisterAll(const char[]);
extern PetscErrorCode MatColoringRegisterDestroy(void);
extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*);

/*S
     MatFDColoring - Object for computing a sparse Jacobian via finite differences
        and coloring

   Level: beginner

   Concepts: coloring, sparse Jacobian, finite differences

.seealso:  MatFDColoringCreate()
S*/
typedef struct _p_MatFDColoring* MatFDColoring;

extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *);
extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*);
extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer);
extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*);
extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**);
extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal);
extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring);
extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *);
extern PetscErrorCode MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *);
extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec);
extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]);
/*
    These routines are for partitioning matrices: currently used only
  for adjacency
  matrix, MatCreateMPIAdj().
*/

/*S
     MatPartitioning - Object for managing the partitioning of a matrix or graph

   Level: beginner

   Concepts: partitioning

.seealso: MatPartitioningCreate(), MatPartitioningType
S*/
typedef struct _p_MatPartitioning* MatPartitioning;

/*J
    MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate()

   Level: beginner

.seealso: MatPartitioningCreate(), MatPartitioning
J*/
#define MatPartitioningType char*
#define MATPARTITIONINGCURRENT  "current"
#define MATPARTITIONINGSQUARE   "square"
#define MATPARTITIONINGPARMETIS "parmetis"
#define MATPARTITIONINGCHACO    "chaco"
#define MATPARTITIONINGPARTY    "party"
#define MATPARTITIONINGPTSCOTCH "ptscotch"


extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*);
extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType);
extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt);
extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat);
extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]);
extern PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []);
extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*);
extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*);

extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning));

/*MC
   MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the
   matrix package.

   Synopsis:
   PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning))

   Not Collective

   Input Parameters:
+  sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis
.  path - location of library where creation routine is
.  name - name of function that creates the partitioning type, a string
-  function - function pointer that creates the partitioning type

   Level: developer

   If dynamic libraries are used, then the fourth input argument (function)
   is ignored.

   Sample usage:
.vb
   MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyPartCreate",MyPartCreate);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatPartitioningSetType(part,"my_part")
   or at runtime via the option
$     -mat_partitioning_type my_part

   $PETSC_ARCH occurring in pathname will be replaced with appropriate values.

.keywords: matrix, partitioning, register

.seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll()
M*/
/* With dynamic libraries the function pointer (d) is dropped; the creation routine
   is located at runtime by name (c) in the library at path (b) instead. */
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0)
#else
#define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d)
#endif

extern PetscBool MatPartitioningRegisterAllCalled;

extern PetscErrorCode MatPartitioningRegisterAll(const char[]);
extern PetscErrorCode MatPartitioningRegisterDestroy(void);

extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer);
extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning);
extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*);

/* ParMETIS-specific options */
extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning);
extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *);

/* Chaco-specific options.  NOTE(review): the explicit values (1,2,4,5,6 -- 3 unused)
   presumably mirror Chaco's own method numbering; confirm before renumbering. */
typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType;
extern const char *MPChacoGlobalTypes[];
typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType;
extern const char *MPChacoLocalTypes[];
typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType;
extern const char *MPChacoEigenTypes[];

extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType);
extern PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*);
extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType);
extern PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*);
extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode
MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType);
extern PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*);
extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*);
extern PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt);
extern PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*);

/* Party-specific options: global method names ... */
#define MP_PARTY_OPT "opt"
#define MP_PARTY_LIN "lin"
#define MP_PARTY_SCA "sca"
#define MP_PARTY_RAN "ran"
#define MP_PARTY_GBF "gbf"
#define MP_PARTY_GCF "gcf"
#define MP_PARTY_BUB "bub"
#define MP_PARTY_DEF "def"
extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning,const char*);
/* ... and local method names */
#define MP_PARTY_HELPFUL_SETS "hs"
#define MP_PARTY_KERNIGHAN_LIN "kl"
#define MP_PARTY_NONE "no"
extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning,const char*);
extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool);
extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool);

/* PTScotch-specific options */
typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType;
extern const char *MPPTScotchStrategyTypes[];

extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal);
extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*);
extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType);
extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*);

extern PetscErrorCode
MatMeshToVertexGraph(Mat,PetscInt,Mat*);
extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*);

/*
   MatOperation - index of each overridable operation in a matrix implementation's
   function table; used with MatHasOperation()/MatShellSetOperation()/MatShellGetOperation().

   If you add entries here you must also add them to finclude/petscmat.h

   NOTE(review): values 124, 126 and 127 are skipped -- presumably reserved/retired
   slots in the function table; do not renumber existing entries.
*/
typedef enum { MATOP_SET_VALUES=0,
               MATOP_GET_ROW=1,
               MATOP_RESTORE_ROW=2,
               MATOP_MULT=3,
               MATOP_MULT_ADD=4,
               MATOP_MULT_TRANSPOSE=5,
               MATOP_MULT_TRANSPOSE_ADD=6,
               MATOP_SOLVE=7,
               MATOP_SOLVE_ADD=8,
               MATOP_SOLVE_TRANSPOSE=9,
               MATOP_SOLVE_TRANSPOSE_ADD=10,
               MATOP_LUFACTOR=11,
               MATOP_CHOLESKYFACTOR=12,
               MATOP_SOR=13,
               MATOP_TRANSPOSE=14,
               MATOP_GETINFO=15,
               MATOP_EQUAL=16,
               MATOP_GET_DIAGONAL=17,
               MATOP_DIAGONAL_SCALE=18,
               MATOP_NORM=19,
               MATOP_ASSEMBLY_BEGIN=20,
               MATOP_ASSEMBLY_END=21,
               MATOP_SET_OPTION=22,
               MATOP_ZERO_ENTRIES=23,
               MATOP_ZERO_ROWS=24,
               MATOP_LUFACTOR_SYMBOLIC=25,
               MATOP_LUFACTOR_NUMERIC=26,
               MATOP_CHOLESKY_FACTOR_SYMBOLIC=27,
               MATOP_CHOLESKY_FACTOR_NUMERIC=28,
               MATOP_SETUP_PREALLOCATION=29,
               MATOP_ILUFACTOR_SYMBOLIC=30,
               MATOP_ICCFACTOR_SYMBOLIC=31,
               MATOP_GET_ARRAY=32,
               MATOP_RESTORE_ARRAY=33,
               MATOP_DUPLICATE=34,
               MATOP_FORWARD_SOLVE=35,
               MATOP_BACKWARD_SOLVE=36,
               MATOP_ILUFACTOR=37,
               MATOP_ICCFACTOR=38,
               MATOP_AXPY=39,
               MATOP_GET_SUBMATRICES=40,
               MATOP_INCREASE_OVERLAP=41,
               MATOP_GET_VALUES=42,
               MATOP_COPY=43,
               MATOP_GET_ROW_MAX=44,
               MATOP_SCALE=45,
               MATOP_SHIFT=46,
               MATOP_DIAGONAL_SET=47,
               MATOP_ILUDT_FACTOR=48,
               MATOP_SET_BLOCK_SIZE=49,
               MATOP_GET_ROW_IJ=50,
               MATOP_RESTORE_ROW_IJ=51,
               MATOP_GET_COLUMN_IJ=52,
               MATOP_RESTORE_COLUMN_IJ=53,
               MATOP_FDCOLORING_CREATE=54,
               MATOP_COLORING_PATCH=55,
               MATOP_SET_UNFACTORED=56,
               MATOP_PERMUTE=57,
               MATOP_SET_VALUES_BLOCKED=58,
               MATOP_GET_SUBMATRIX=59,
               MATOP_DESTROY=60,
               MATOP_VIEW=61,
               MATOP_CONVERT_FROM=62,
               MATOP_USE_SCALED_FORM=63,
               MATOP_SCALE_SYSTEM=64,
               MATOP_UNSCALE_SYSTEM=65,
               MATOP_SET_LOCAL_TO_GLOBAL_MAP=66,
               MATOP_SET_VALUES_LOCAL=67,
               MATOP_ZERO_ROWS_LOCAL=68,
               MATOP_GET_ROW_MAX_ABS=69,
               MATOP_GET_ROW_MIN_ABS=70,
               MATOP_CONVERT=71,
               MATOP_SET_COLORING=72,
               MATOP_SET_VALUES_ADIC=73,
               MATOP_SET_VALUES_ADIFOR=74,
               MATOP_FD_COLORING_APPLY=75,
               MATOP_SET_FROM_OPTIONS=76,
               MATOP_MULT_CON=77,
               MATOP_MULT_TRANSPOSE_CON=78,
               MATOP_PERMUTE_SPARSIFY=79,
               MATOP_MULT_MULTIPLE=80,
               MATOP_SOLVE_MULTIPLE=81,
               MATOP_GET_INERTIA=82,
               MATOP_LOAD=83,
               MATOP_IS_SYMMETRIC=84,
               MATOP_IS_HERMITIAN=85,
               MATOP_IS_STRUCTURALLY_SYMMETRIC=86,
               MATOP_DUMMY=87,
               MATOP_GET_VECS=88,
               MATOP_MAT_MULT=89,
               MATOP_MAT_MULT_SYMBOLIC=90,
               MATOP_MAT_MULT_NUMERIC=91,
               MATOP_PTAP=92,
               MATOP_PTAP_SYMBOLIC=93,
               MATOP_PTAP_NUMERIC=94,
               MATOP_MAT_MULTTRANSPOSE=95,
               MATOP_MAT_MULTTRANSPOSE_SYM=96,
               MATOP_MAT_MULTTRANSPOSE_NUM=97,
               MATOP_PTAP_SYMBOLIC_SEQAIJ=98,
               MATOP_PTAP_NUMERIC_SEQAIJ=99,
               MATOP_PTAP_SYMBOLIC_MPIAIJ=100,
               MATOP_PTAP_NUMERIC_MPIAIJ=101,
               MATOP_CONJUGATE=102,
               MATOP_SET_SIZES=103,
               MATOP_SET_VALUES_ROW=104,
               MATOP_REAL_PART=105,
               MATOP_IMAG_PART=106,
               MATOP_GET_ROW_UTRIANGULAR=107,
               MATOP_RESTORE_ROW_UTRIANGULAR=108,
               MATOP_MATSOLVE=109,
               MATOP_GET_REDUNDANTMATRIX=110,
               MATOP_GET_ROW_MIN=111,
               MATOP_GET_COLUMN_VEC=112,
               MATOP_MISSING_DIAGONAL=113,
               MATOP_MATGETSEQNONZEROSTRUCTURE=114,
               MATOP_CREATE=115,
               MATOP_GET_GHOSTS=116,
               MATOP_GET_LOCALSUBMATRIX=117,
               MATOP_RESTORE_LOCALSUBMATRIX=118,
               MATOP_MULT_DIAGONAL_BLOCK=119,
               MATOP_HERMITIANTRANSPOSE=120,
               MATOP_MULTHERMITIANTRANSPOSE=121,
               MATOP_MULTHERMITIANTRANSPOSEADD=122,
               MATOP_GETMULTIPROCBLOCK=123,
               MATOP_GETCOLUMNNORMS=125,
               MATOP_GET_SUBMATRICES_PARALLEL=128,
               MATOP_SET_VALUES_BATCH=129
             } MatOperation;
extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *);
extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void));
extern PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void));
extern PetscErrorCode MatShellSetContext(Mat,void*);

/*
   Codes for matrices stored on disk. By default they are
   stored in a universal format. By changing the format with
   PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will
   be stored in a way natural for the matrix, for example dense matrices
   would be stored as dense. Matrices stored this way may only be
   read into matrices of the same type.
*/
#define MATRIX_BINARY_FORMAT_DENSE -1

extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal);
extern PetscErrorCode MatISGetLocalMat(Mat,Mat*);

/*S
     MatNullSpace - Object that removes a null space from a vector, i.e.
         orthogonalizes the vector to a subspace

   Level: advanced

   Concepts: matrix; linear operator, null space

   Users manual sections:
.
sec_singular 1616 1617 .seealso: MatNullSpaceCreate() 1618 S*/ 1619 typedef struct _p_MatNullSpace* MatNullSpace; 1620 1621 extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*); 1622 extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*); 1623 extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*); 1624 extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*); 1625 extern PetscErrorCode MatSetNullSpace(Mat,MatNullSpace); 1626 extern PetscErrorCode MatSetNearNullSpace(Mat,MatNullSpace); 1627 extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *); 1628 extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer); 1629 1630 extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS); 1631 extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal); 1632 extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *); 1633 extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat); 1634 1635 extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*); 1636 extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*); 1637 extern PetscErrorCode MatMAIJGetAIJ(Mat,Mat*); 1638 1639 extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*); 1640 1641 extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec); 1642 1643 extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*); 1644 extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec); 1645 extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*); 1646 extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*)); 1647 extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec)); 1648 extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace); 1649 extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt); 1650 extern PetscErrorCode MatMFFDResetHHistory(Mat); 1651 extern PetscErrorCode 
MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *);
extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);

/*S
    MatMFFD - A data structure used to manage the computation of the h differencing parameter for matrix-free
              Jacobian vector products

    Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

           MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure

    Level: developer

.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
S*/
typedef struct _p_MatMFFD* MatMFFD;

/*J
    MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function

   Level: beginner

.seealso: MatMFFDSetType(), MatMFFDRegister()
J*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"
#define MATMFFD_WP  "wp"

extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType);
extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));

/*MC
   MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.

   Synopsis:
   PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined compute-h module
.  path - path (either absolute or relative) the library containing this solver
.
  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Level: developer

   Notes:
   MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.

   Sample usage:
.vb
   MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyHCreate",MyHCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     MatMFFDSetType(mfctx,"my_h")
   or at runtime via the option
$     -snes_mf_type my_h

.keywords: MatMFFD, register

.seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
 M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode MatMFFDRegisterAll(const char[]);
extern PetscErrorCode MatMFFDRegisterDestroy(void);
extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal);
extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool );


extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);

/*
   PETSc interface to MUMPS
*/
#ifdef PETSC_HAVE_MUMPS
extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
#endif

/*
   PETSc interface to SUPERLU
*/
#ifdef PETSC_HAVE_SUPERLU
extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal);
#endif

/* GPU (CUSP) matrix constructors, only available when PETSc was configured with CUSP */
#if defined(PETSC_HAVE_CUSP)
extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern
PetscErrorCode MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
#endif

/*
   PETSc interface to FFTW
*/
#if defined(PETSC_HAVE_FFTW)
extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
#endif

/* Nested (block) matrices built from submatrices; see MATNEST */
extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);

/*
 MatIJ:
 An unweighted directed pseudograph
 An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
 A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
 by mapping the indices to the indices connected to them by the (pseudo)graph edges.
*/
typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType;
extern PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
extern PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
extern PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
extern PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
extern PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
extern PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
extern PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
extern
PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
/* Support = rows with at least one edge; image = columns reached by some edge */
extern PetscErrorCode MatIJGetSupport(Mat, PetscInt *, PetscInt **);
extern PetscErrorCode MatIJGetSupportIS(Mat, IS *);
extern PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
extern PetscErrorCode MatIJGetImageIS(Mat, IS *);
extern PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);

extern PetscErrorCode MatIJBinRenumber(Mat, Mat*);

/* Map/bin sparse index (and optional integer/scalar payload) arrays through the graph */
extern PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);

PETSC_EXTERN_CXX_END
#endif