1 #include <petsc/private/matimpl.h> /*I "petscmat.h" I*/ 2 3 PETSC_INTERN PetscErrorCode MatSetBlockSizes_Default(Mat mat, PetscInt rbs, PetscInt cbs) { 4 PetscFunctionBegin; 5 if (!mat->preallocated) PetscFunctionReturn(0); 6 PetscCheck(mat->rmap->bs <= 0 || mat->rmap->bs == rbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change row block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->rmap->bs, rbs); 7 PetscCheck(mat->cmap->bs <= 0 || mat->cmap->bs == cbs, PetscObjectComm((PetscObject)mat), PETSC_ERR_SUP, "Cannot change column block size %" PetscInt_FMT " to %" PetscInt_FMT, mat->cmap->bs, cbs); 8 PetscFunctionReturn(0); 9 } 10 11 PETSC_INTERN PetscErrorCode MatShift_Basic(Mat Y, PetscScalar a) { 12 PetscInt i, start, end; 13 PetscScalar alpha = a; 14 PetscBool prevoption; 15 16 PetscFunctionBegin; 17 PetscCall(MatGetOption(Y, MAT_NO_OFF_PROC_ENTRIES, &prevoption)); 18 PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, PETSC_TRUE)); 19 PetscCall(MatGetOwnershipRange(Y, &start, &end)); 20 for (i = start; i < end; i++) { 21 if (i < Y->cmap->N) PetscCall(MatSetValues(Y, 1, &i, 1, &i, &alpha, ADD_VALUES)); 22 } 23 PetscCall(MatAssemblyBegin(Y, MAT_FINAL_ASSEMBLY)); 24 PetscCall(MatAssemblyEnd(Y, MAT_FINAL_ASSEMBLY)); 25 PetscCall(MatSetOption(Y, MAT_NO_OFF_PROC_ENTRIES, prevoption)); 26 PetscFunctionReturn(0); 27 } 28 29 /*@ 30 MatCreate - Creates a matrix where the type is determined 31 from either a call to `MatSetType()` or from the options database 32 with a call to `MatSetFromOptions()`. The default matrix type is 33 `MATAIJ`, using the routines `MatCreateSeqAIJ()` or `MatCreateAIJ()` 34 if you do not set a type in the options database. If you never 35 call `MatSetType()` or `MatSetFromOptions()` it will generate an 36 error when you try to use the matrix. 37 38 Collective 39 40 Input Parameter: 41 . comm - MPI communicator 42 43 Output Parameter: 44 . 
A - the matrix 45 46 Options Database Keys: 47 + -mat_type seqaij - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()` 48 . -mat_type mpiaij - `MATMPIAIJ` type, uses `MatCreateAIJ()` 49 . -mat_type seqdense - `MATSEQDENSE`, uses `MatCreateSeqDense()` 50 . -mat_type mpidense - `MATMPIDENSE` type, uses `MatCreateDense()` 51 . -mat_type seqbaij - `MATSEQBAIJ` type, uses `MatCreateSeqBAIJ()` 52 - -mat_type mpibaij - `MATMPIBAIJ` type, uses `MatCreateBAIJ()` 53 54 Even More Options Database Keys: 55 See the manpages for particular formats (e.g., `MatCreateSeqAIJ()`) 56 for additional format-specific options. 57 58 Level: beginner 59 60 `.seealso: `MatCreateSeqAIJ()`, `MatCreateAIJ()`, 61 `MatCreateSeqDense()`, `MatCreateDense()`, 62 `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, 63 `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`, 64 `MatConvert()` 65 @*/ 66 PetscErrorCode MatCreate(MPI_Comm comm, Mat *A) { 67 Mat B; 68 69 PetscFunctionBegin; 70 PetscValidPointer(A, 2); 71 72 *A = NULL; 73 PetscCall(MatInitializePackage()); 74 75 PetscCall(PetscHeaderCreate(B, MAT_CLASSID, "Mat", "Matrix", "Mat", comm, MatDestroy, MatView)); 76 PetscCall(PetscLayoutCreate(comm, &B->rmap)); 77 PetscCall(PetscLayoutCreate(comm, &B->cmap)); 78 PetscCall(PetscStrallocpy(VECSTANDARD, &B->defaultvectype)); 79 80 B->symmetric = PETSC_BOOL3_UNKNOWN; 81 B->hermitian = PETSC_BOOL3_UNKNOWN; 82 B->structurally_symmetric = PETSC_BOOL3_UNKNOWN; 83 B->spd = PETSC_BOOL3_UNKNOWN; 84 B->symmetry_eternal = PETSC_FALSE; 85 B->structural_symmetry_eternal = PETSC_FALSE; 86 87 B->congruentlayouts = PETSC_DECIDE; 88 B->preallocated = PETSC_FALSE; 89 #if defined(PETSC_HAVE_DEVICE) 90 B->boundtocpu = PETSC_TRUE; 91 #endif 92 *A = B; 93 PetscFunctionReturn(0); 94 } 95 96 /*@ 97 MatSetErrorIfFailure - Causes `Mat` to generate an immediate error, for example a zero pivot, is detected. 
98 99 Logically Collective on mat 100 101 Input Parameters: 102 + mat - matrix obtained from `MatCreate()` 103 - flg - `PETSC_TRUE` indicates you want the error generated 104 105 Level: advanced 106 107 Note: 108 If this flag is not set then the matrix operation will note the error and continue. The error may cause a later `PC` or `KSP` error 109 or result in a `KSPConvergedReason` indicating the method did not converge. 110 111 .seealso: `PCSetErrorIfFailure()`, `KSPConvergedReason`, `SNESConvergedReason` 112 @*/ 113 PetscErrorCode MatSetErrorIfFailure(Mat mat, PetscBool flg) { 114 PetscFunctionBegin; 115 PetscValidHeaderSpecific(mat, MAT_CLASSID, 1); 116 PetscValidLogicalCollectiveBool(mat, flg, 2); 117 mat->erroriffailure = flg; 118 PetscFunctionReturn(0); 119 } 120 121 /*@ 122 MatSetSizes - Sets the local and global sizes, and checks to determine compatibility 123 124 Collective on Mat 125 126 Input Parameters: 127 + A - the matrix 128 . m - number of local rows (or `PETSC_DECIDE`) 129 . n - number of local columns (or `PETSC_DECIDE`) 130 . M - number of global rows (or `PETSC_DETERMINE`) 131 - N - number of global columns (or `PETSC_DETERMINE`) 132 133 Notes: 134 m (n) and M (N) cannot be both `PETSC_DECIDE` 135 If one processor calls this with M (N) of `PETSC_DECIDE` then all processors must, otherwise the program will hang. 136 137 If `PETSC_DECIDE` is not used for the arguments 'm' and 'n', then the 138 user must ensure that they are chosen to be compatible with the 139 vectors. To do this, one first considers the matrix-vector product 140 'y = A x'. The 'm' that is used in the above routine must match the 141 local size used in the vector creation routine VecCreateMPI() for 'y'. 142 Likewise, the 'n' used must match that used as the local size in 143 `VecCreateMPI()` for 'x'. 144 145 You cannot change the sizes once they have been set. 146 147 The sizes must be set before `MatSetUp()` or MatXXXSetPreallocation() is called. 
148 149 Level: beginner 150 151 .seealso: `MatGetSize()`, `PetscSplitOwnership()` 152 @*/ 153 PetscErrorCode MatSetSizes(Mat A, PetscInt m, PetscInt n, PetscInt M, PetscInt N) { 154 PetscFunctionBegin; 155 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 156 PetscValidLogicalCollectiveInt(A, M, 4); 157 PetscValidLogicalCollectiveInt(A, N, 5); 158 PetscCheck(M <= 0 || m <= M, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local row size %" PetscInt_FMT " cannot be larger than global row size %" PetscInt_FMT, m, M); 159 PetscCheck(N <= 0 || n <= N, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Local column size %" PetscInt_FMT " cannot be larger than global column size %" PetscInt_FMT, n, N); 160 PetscCheck((A->rmap->n < 0 || A->rmap->N < 0) || (A->rmap->n == m && (M <= 0 || A->rmap->N == M)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset row sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", m, M, 161 A->rmap->n, A->rmap->N); 162 PetscCheck((A->cmap->n < 0 || A->cmap->N < 0) || (A->cmap->n == n && (N <= 0 || A->cmap->N == N)), PETSC_COMM_SELF, PETSC_ERR_SUP, "Cannot change/reset column sizes to %" PetscInt_FMT " local %" PetscInt_FMT " global after previously setting them to %" PetscInt_FMT " local %" PetscInt_FMT " global", n, N, 163 A->cmap->n, A->cmap->N); 164 A->rmap->n = m; 165 A->cmap->n = n; 166 A->rmap->N = M > -1 ? M : A->rmap->N; 167 A->cmap->N = N > -1 ? N : A->cmap->N; 168 PetscFunctionReturn(0); 169 } 170 171 /*@ 172 MatSetFromOptions - Creates a matrix where the type is determined 173 from the options database. Generates a parallel MPI matrix if the 174 communicator has more than one processor. The default matrix type is 175 `MATAIJ`, using the routines `MatCreateSeqAIJ()` and `MatCreateAIJ()` if 176 you do not select a type in the options database. 177 178 Collective on B 179 180 Input Parameter: 181 . 
A - the matrix 182 183 Options Database Keys: 184 + -mat_type seqaij - `MATSEQAIJ` type, uses `MatCreateSeqAIJ()` 185 . -mat_type mpiaij - `MATMPIAIJ` type, uses `MatCreateAIJ()` 186 . -mat_type seqdense - `MATSEQDENSE` type, uses `MatCreateSeqDense()` 187 . -mat_type mpidense - `MATMPIDENSE`, uses `MatCreateDense()` 188 . -mat_type seqbaij - `MATSEQBAIJ`, uses `MatCreateSeqBAIJ()` 189 - -mat_type mpibaij - `MATMPIBAIJ`, uses `MatCreateBAIJ()` 190 191 Even More Options Database Keys: 192 See the manpages for particular formats (e.g., `MatCreateSeqAIJ()`) 193 for additional format-specific options. 194 195 Level: beginner 196 197 .seealso: `MatCreateSeqAIJ(()`, `MatCreateAIJ()`, 198 `MatCreateSeqDense()`, `MatCreateDense()`, 199 `MatCreateSeqBAIJ()`, `MatCreateBAIJ()`, 200 `MatCreateSeqSBAIJ()`, `MatCreateSBAIJ()`, 201 `MatConvert()` 202 @*/ 203 PetscErrorCode MatSetFromOptions(Mat B) { 204 const char *deft = MATAIJ; 205 char type[256]; 206 PetscBool flg, set; 207 PetscInt bind_below = 0; 208 209 PetscFunctionBegin; 210 PetscValidHeaderSpecific(B, MAT_CLASSID, 1); 211 212 PetscObjectOptionsBegin((PetscObject)B); 213 214 if (B->rmap->bs < 0) { 215 PetscInt newbs = -1; 216 PetscCall(PetscOptionsInt("-mat_block_size", "Set the blocksize used to store the matrix", "MatSetBlockSize", newbs, &newbs, &flg)); 217 if (flg) { 218 PetscCall(PetscLayoutSetBlockSize(B->rmap, newbs)); 219 PetscCall(PetscLayoutSetBlockSize(B->cmap, newbs)); 220 } 221 } 222 223 PetscCall(PetscOptionsFList("-mat_type", "Matrix type", "MatSetType", MatList, deft, type, 256, &flg)); 224 if (flg) { 225 PetscCall(MatSetType(B, type)); 226 } else if (!((PetscObject)B)->type_name) { 227 PetscCall(MatSetType(B, deft)); 228 } 229 230 PetscCall(PetscOptionsName("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", &B->checksymmetryonassembly)); 231 PetscCall(PetscOptionsReal("-mat_is_symmetric", "Checks if mat is symmetric on MatAssemblyEnd()", "MatIsSymmetric", 
B->checksymmetrytol, &B->checksymmetrytol, NULL)); 232 PetscCall(PetscOptionsBool("-mat_null_space_test", "Checks if provided null space is correct in MatAssemblyEnd()", "MatSetNullSpaceTest", B->checknullspaceonassembly, &B->checknullspaceonassembly, NULL)); 233 PetscCall(PetscOptionsBool("-mat_error_if_failure", "Generate an error if an error occurs when factoring the matrix", "MatSetErrorIfFailure", B->erroriffailure, &B->erroriffailure, NULL)); 234 235 PetscTryTypeMethod(B, setfromoptions, PetscOptionsObject); 236 237 flg = PETSC_FALSE; 238 PetscCall(PetscOptionsBool("-mat_new_nonzero_location_err", "Generate an error if new nonzeros are created in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set)); 239 if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_LOCATION_ERR, flg)); 240 flg = PETSC_FALSE; 241 PetscCall(PetscOptionsBool("-mat_new_nonzero_allocation_err", "Generate an error if new nonzeros are allocated in the matrix structure (useful to test preallocation)", "MatSetOption", flg, &flg, &set)); 242 if (set) PetscCall(MatSetOption(B, MAT_NEW_NONZERO_ALLOCATION_ERR, flg)); 243 flg = PETSC_FALSE; 244 PetscCall(PetscOptionsBool("-mat_ignore_zero_entries", "For AIJ/IS matrices this will stop zero values from creating a zero location in the matrix", "MatSetOption", flg, &flg, &set)); 245 if (set) PetscCall(MatSetOption(B, MAT_IGNORE_ZERO_ENTRIES, flg)); 246 247 flg = PETSC_FALSE; 248 PetscCall(PetscOptionsBool("-mat_form_explicit_transpose", "Hint to form an explicit transpose for operations like MatMultTranspose", "MatSetOption", flg, &flg, &set)); 249 if (set) PetscCall(MatSetOption(B, MAT_FORM_EXPLICIT_TRANSPOSE, flg)); 250 251 /* Bind to CPU if below a user-specified size threshold. 252 * This perhaps belongs in the options for the GPU Mat types, but MatBindToCPU() does nothing when called on non-GPU types, 253 * and putting it here makes is more maintainable than duplicating this for all. 
*/ 254 PetscCall(PetscOptionsInt("-mat_bind_below", "Set the size threshold (in local rows) below which the Mat is bound to the CPU", "MatBindToCPU", bind_below, &bind_below, &flg)); 255 if (flg && B->rmap->n < bind_below) PetscCall(MatBindToCPU(B, PETSC_TRUE)); 256 257 /* process any options handlers added with PetscObjectAddOptionsHandler() */ 258 PetscCall(PetscObjectProcessOptionsHandlers((PetscObject)B, PetscOptionsObject)); 259 PetscOptionsEnd(); 260 PetscFunctionReturn(0); 261 } 262 263 /*@C 264 MatXAIJSetPreallocation - set preallocation for serial and parallel `MATAIJ`, `MATBAIJ`, and `MATSBAIJ` matrices and their unassembled versions. 265 266 Collective on A 267 268 Input Parameters: 269 + A - matrix being preallocated 270 . bs - block size 271 . dnnz - number of nonzero column blocks per block row of diagonal part of parallel matrix 272 . onnz - number of nonzero column blocks per block row of off-diagonal part of parallel matrix 273 . dnnzu - number of nonzero column blocks per block row of upper-triangular part of diagonal part of parallel matrix 274 - onnzu - number of nonzero column blocks per block row of upper-triangular part of off-diagonal part of parallel matrix 275 276 Level: beginner 277 278 .seealso: `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, 279 `PetscSplitOwnership()` 280 @*/ 281 PetscErrorCode MatXAIJSetPreallocation(Mat A, PetscInt bs, const PetscInt dnnz[], const PetscInt onnz[], const PetscInt dnnzu[], const PetscInt onnzu[]) { 282 PetscInt cbs; 283 void (*aij)(void); 284 void (*is)(void); 285 void (*hyp)(void) = NULL; 286 287 PetscFunctionBegin; 288 if (bs != PETSC_DECIDE) { /* don't mess with an already set block size */ 289 PetscCall(MatSetBlockSize(A, bs)); 290 } 291 PetscCall(PetscLayoutSetUp(A->rmap)); 292 PetscCall(PetscLayoutSetUp(A->cmap)); 293 PetscCall(MatGetBlockSizes(A, &bs, 
&cbs)); 294 /* these routines assumes bs == cbs, this should be checked somehow */ 295 PetscCall(MatSeqBAIJSetPreallocation(A, bs, 0, dnnz)); 296 PetscCall(MatMPIBAIJSetPreallocation(A, bs, 0, dnnz, 0, onnz)); 297 PetscCall(MatSeqSBAIJSetPreallocation(A, bs, 0, dnnzu)); 298 PetscCall(MatMPISBAIJSetPreallocation(A, bs, 0, dnnzu, 0, onnzu)); 299 /* 300 In general, we have to do extra work to preallocate for scalar (AIJ) or unassembled (IS) matrices so we check whether it will do any 301 good before going on with it. 302 */ 303 PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatMPIAIJSetPreallocation_C", &aij)); 304 PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatISSetPreallocation_C", &is)); 305 #if defined(PETSC_HAVE_HYPRE) 306 PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatHYPRESetPreallocation_C", &hyp)); 307 #endif 308 if (!aij && !is && !hyp) PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSeqAIJSetPreallocation_C", &aij)); 309 if (aij || is || hyp) { 310 if (bs == cbs && bs == 1) { 311 PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz)); 312 PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz, 0, onnz)); 313 PetscCall(MatISSetPreallocation(A, 0, dnnz, 0, onnz)); 314 #if defined(PETSC_HAVE_HYPRE) 315 PetscCall(MatHYPRESetPreallocation(A, 0, dnnz, 0, onnz)); 316 #endif 317 } else { /* Convert block-row precallocation to scalar-row */ 318 PetscInt i, m, *sdnnz, *sonnz; 319 PetscCall(MatGetLocalSize(A, &m, NULL)); 320 PetscCall(PetscMalloc2((!!dnnz) * m, &sdnnz, (!!onnz) * m, &sonnz)); 321 for (i = 0; i < m; i++) { 322 if (dnnz) sdnnz[i] = dnnz[i / bs] * cbs; 323 if (onnz) sonnz[i] = onnz[i / bs] * cbs; 324 } 325 PetscCall(MatSeqAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL)); 326 PetscCall(MatMPIAIJSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL)); 327 PetscCall(MatISSetPreallocation(A, 0, dnnz ? sdnnz : NULL, 0, onnz ? sonnz : NULL)); 328 #if defined(PETSC_HAVE_HYPRE) 329 PetscCall(MatHYPRESetPreallocation(A, 0, dnnz ? 
sdnnz : NULL, 0, onnz ? sonnz : NULL)); 330 #endif 331 PetscCall(PetscFree2(sdnnz, sonnz)); 332 } 333 } 334 PetscFunctionReturn(0); 335 } 336 337 /* 338 Merges some information from Cs header to A; the C object is then destroyed 339 340 This is somewhat different from MatHeaderReplace() it would be nice to merge the code 341 */ 342 PetscErrorCode MatHeaderMerge(Mat A, Mat *C) { 343 PetscInt refct; 344 PetscOps Abops; 345 struct _MatOps Aops; 346 char *mtype, *mname, *mprefix; 347 Mat_Product *product; 348 Mat_Redundant *redundant; 349 PetscObjectState state; 350 351 PetscFunctionBegin; 352 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 353 PetscValidHeaderSpecific(*C, MAT_CLASSID, 2); 354 if (A == *C) PetscFunctionReturn(0); 355 PetscCheckSameComm(A, 1, *C, 2); 356 /* save the parts of A we need */ 357 Abops = ((PetscObject)A)->bops[0]; 358 Aops = A->ops[0]; 359 refct = ((PetscObject)A)->refct; 360 mtype = ((PetscObject)A)->type_name; 361 mname = ((PetscObject)A)->name; 362 state = ((PetscObject)A)->state; 363 mprefix = ((PetscObject)A)->prefix; 364 product = A->product; 365 redundant = A->redundant; 366 367 /* zero these so the destroy below does not free them */ 368 ((PetscObject)A)->type_name = NULL; 369 ((PetscObject)A)->name = NULL; 370 371 /* 372 free all the interior data structures from mat 373 cannot use PetscUseTypeMethod(A,destroy); because compiler 374 thinks it may print NULL type_name and name 375 */ 376 PetscTryTypeMethod(A, destroy); 377 378 PetscCall(PetscFree(A->defaultvectype)); 379 PetscCall(PetscLayoutDestroy(&A->rmap)); 380 PetscCall(PetscLayoutDestroy(&A->cmap)); 381 PetscCall(PetscFunctionListDestroy(&((PetscObject)A)->qlist)); 382 PetscCall(PetscObjectListDestroy(&((PetscObject)A)->olist)); 383 PetscCall(PetscComposedQuantitiesDestroy((PetscObject)A)); 384 385 /* copy C over to A */ 386 PetscCall(PetscFree(A->factorprefix)); 387 PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat))); 388 389 /* return the parts of A we saved */ 390 
((PetscObject)A)->bops[0] = Abops; 391 A->ops[0] = Aops; 392 ((PetscObject)A)->refct = refct; 393 ((PetscObject)A)->type_name = mtype; 394 ((PetscObject)A)->name = mname; 395 ((PetscObject)A)->prefix = mprefix; 396 ((PetscObject)A)->state = state + 1; 397 A->product = product; 398 A->redundant = redundant; 399 400 /* since these two are copied into A we do not want them destroyed in C */ 401 ((PetscObject)*C)->qlist = NULL; 402 ((PetscObject)*C)->olist = NULL; 403 404 PetscCall(PetscHeaderDestroy(C)); 405 PetscFunctionReturn(0); 406 } 407 /* 408 Replace A's header with that of C; the C object is then destroyed 409 410 This is essentially code moved from MatDestroy() 411 412 This is somewhat different from MatHeaderMerge() it would be nice to merge the code 413 414 Used in DM hence is declared PETSC_EXTERN 415 */ 416 PETSC_EXTERN PetscErrorCode MatHeaderReplace(Mat A, Mat *C) { 417 PetscInt refct; 418 PetscObjectState state; 419 struct _p_Mat buffer; 420 MatStencilInfo stencil; 421 422 PetscFunctionBegin; 423 PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 424 PetscValidHeaderSpecific(*C, MAT_CLASSID, 2); 425 if (A == *C) PetscFunctionReturn(0); 426 PetscCheckSameComm(A, 1, *C, 2); 427 PetscCheck(((PetscObject)*C)->refct == 1, PetscObjectComm((PetscObject)C), PETSC_ERR_ARG_WRONGSTATE, "Object C has refct %" PetscInt_FMT " > 1, would leave hanging reference", ((PetscObject)*C)->refct); 428 429 /* swap C and A */ 430 refct = ((PetscObject)A)->refct; 431 state = ((PetscObject)A)->state; 432 stencil = A->stencil; 433 PetscCall(PetscMemcpy(&buffer, A, sizeof(struct _p_Mat))); 434 PetscCall(PetscMemcpy(A, *C, sizeof(struct _p_Mat))); 435 PetscCall(PetscMemcpy(*C, &buffer, sizeof(struct _p_Mat))); 436 ((PetscObject)A)->refct = refct; 437 ((PetscObject)A)->state = state + 1; 438 A->stencil = stencil; 439 440 ((PetscObject)*C)->refct = 1; 441 PetscCall(MatShellSetOperation(*C, MATOP_DESTROY, (void (*)(void))NULL)); 442 PetscCall(MatDestroy(C)); 443 PetscFunctionReturn(0); 444 } 
/*@
   MatBindToCPU - marks a matrix to temporarily stay on the CPU and perform computations on the CPU

   Logically collective on A

   Input Parameters:
+  A - the matrix
-  flg - bind to the CPU if value of `PETSC_TRUE`

   Level: intermediate

.seealso: `MatBoundToCPU()`
@*/
PetscErrorCode MatBindToCPU(Mat A, PetscBool flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidLogicalCollectiveBool(A, flg, 2);
#if defined(PETSC_HAVE_DEVICE)
  /* no-op when the flag is unchanged; otherwise record it and let the type react */
  if (A->boundtocpu == flg) PetscFunctionReturn(0);
  A->boundtocpu = flg;
  PetscTryTypeMethod(A, bindtocpu, flg);
#endif
  PetscFunctionReturn(0);
}

/*@
   MatBoundToCPU - query if a matrix is bound to the CPU

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  flg - the logical flag

   Level: intermediate

.seealso: `MatBindToCPU()`
@*/
PetscErrorCode MatBoundToCPU(Mat A, PetscBool *flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
#if defined(PETSC_HAVE_DEVICE)
  *flg = A->boundtocpu;
#else
  /* without device support every matrix lives on the CPU */
  *flg = PETSC_TRUE;
#endif
  PetscFunctionReturn(0);
}

/*
  Fallback implementation of MatSetValuesCOO(): replays the COO indices stashed on the
  matrix by MatSetPreallocationCOO_Basic() (as the composed "__PETSc_coo_i/j" index sets)
  through one MatSetValue() call per entry. Very slow; matrix types are expected to
  provide a native MatSetValuesCOO_C method instead.
*/
PetscErrorCode MatSetValuesCOO_Basic(Mat A, const PetscScalar coo_v[], InsertMode imode) {
  IS              is_coo_i, is_coo_j;
  const PetscInt *coo_i, *coo_j;
  PetscInt        n, n_i, n_j;
  PetscScalar     zero = 0.;

  PetscFunctionBegin;
  PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_i", (PetscObject *)&is_coo_i));
  PetscCall(PetscObjectQuery((PetscObject)A, "__PETSc_coo_j", (PetscObject *)&is_coo_j));
  PetscCheck(is_coo_i, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_i IS");
  PetscCheck(is_coo_j, PetscObjectComm((PetscObject)A), PETSC_ERR_COR, "Missing coo_j IS");
  PetscCall(ISGetLocalSize(is_coo_i, &n_i));
  PetscCall(ISGetLocalSize(is_coo_j, &n_j));
  PetscCheck(n_i == n_j, PETSC_COMM_SELF, PETSC_ERR_COR, "Wrong local size %" PetscInt_FMT " != %" PetscInt_FMT, n_i, n_j);
  PetscCall(ISGetIndices(is_coo_i, &coo_i));
  PetscCall(ISGetIndices(is_coo_j, &coo_j));
  /* INSERT semantics are obtained by zeroing first and then summing all entries,
     so repeated indices are accumulated regardless of imode */
  if (imode != ADD_VALUES) PetscCall(MatZeroEntries(A));
  for (n = 0; n < n_i; n++) PetscCall(MatSetValue(A, coo_i[n], coo_j[n], coo_v ? coo_v[n] : zero, ADD_VALUES));
  PetscCall(ISRestoreIndices(is_coo_i, &coo_i));
  PetscCall(ISRestoreIndices(is_coo_j, &coo_j));
  PetscFunctionReturn(0);
}

/*
  Fallback implementation of MatSetPreallocationCOO(): computes the nonzero pattern with a
  MATPREALLOCATOR matrix, applies it to A, then stashes copies of the COO indices on A so
  that MatSetValuesCOO_Basic() can replay them later.
*/
PetscErrorCode MatSetPreallocationCOO_Basic(Mat A, PetscCount ncoo, const PetscInt coo_i[], const PetscInt coo_j[]) {
  Mat         preallocator;
  IS          is_coo_i, is_coo_j;
  PetscScalar zero = 0.0;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  PetscCall(MatCreate(PetscObjectComm((PetscObject)A), &preallocator));
  PetscCall(MatSetType(preallocator, MATPREALLOCATOR));
  PetscCall(MatSetSizes(preallocator, A->rmap->n, A->cmap->n, A->rmap->N, A->cmap->N));
  /* share A's layouts so ownership ranges match exactly */
  PetscCall(MatSetLayouts(preallocator, A->rmap, A->cmap));
  PetscCall(MatSetUp(preallocator));
  for (PetscCount n = 0; n < ncoo; n++) PetscCall(MatSetValue(preallocator, coo_i[n], coo_j[n], zero, INSERT_VALUES));
  PetscCall(MatAssemblyBegin(preallocator, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(preallocator, MAT_FINAL_ASSEMBLY));
  PetscCall(MatPreallocatorPreallocate(preallocator, PETSC_TRUE, A));
  PetscCall(MatDestroy(&preallocator));
  /* ISCreateGeneral() below takes a PetscInt length, so ncoo must fit in one */
  PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_i, PETSC_COPY_VALUES, &is_coo_i));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, ncoo, coo_j, PETSC_COPY_VALUES, &is_coo_j));
  PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_i", (PetscObject)is_coo_i));
  PetscCall(PetscObjectCompose((PetscObject)A, "__PETSc_coo_j", (PetscObject)is_coo_j));
  PetscCall(ISDestroy(&is_coo_i));
  PetscCall(ISDestroy(&is_coo_j));
  PetscFunctionReturn(0);
}

/*@C
   MatSetPreallocationCOO - set preallocation for matrices using a coordinate format of the entries with global indices

   Collective on A

   Input Parameters:
+  A - matrix being preallocated
.  ncoo - number of entries
.  coo_i - row indices
-  coo_j - column indices

   Level: beginner

   Notes:
   The indices coo_i and coo_j may be modified within this function. The caller should not rely on them
   having any specific value after this function returns. The arrays can be freed or reused immediately
   after this function returns.

   Entries can be repeated, see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
   are allowed and will be properly added or inserted to the matrix, unless the matrix option MAT_IGNORE_OFF_PROC_ENTRIES
   is set, in which case remote entries are ignored, or `MAT_NO_OFF_PROC_ENTRIES` is set, in which case an error will be generated.

.seealso: `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOOLocal()`, `DMSetMatrixPreallocateSkip()`
@*/
PetscErrorCode MatSetPreallocationCOO(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[]) {
  PetscErrorCode (*f)(Mat, PetscCount, const PetscInt[], const PetscInt[]) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  if (ncoo) PetscValidIntPointer(coo_i, 3);
  if (ncoo) PetscValidIntPointer(coo_j, 4);
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));
  /* prefer a type-native implementation; fall back to the slow generic one */
  PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOO_C", &f));

  PetscCall(PetscLogEventBegin(MAT_PreallCOO, A, 0, 0, 0));
  if (f) {
    PetscCall((*f)(A, ncoo, coo_i, coo_j));
  } else { /* allow fallback, very slow */
    PetscCall(MatSetPreallocationCOO_Basic(A, ncoo, coo_i, coo_j));
  }
  PetscCall(PetscLogEventEnd(MAT_PreallCOO, A, 0, 0, 0));
  A->preallocated = PETSC_TRUE;
  A->nonzerostate++;
  PetscFunctionReturn(0);
}

/*@C
   MatSetPreallocationCOOLocal - set preallocation for matrices using a coordinate format of the entries with local indices

   Collective on A

   Input Parameters:
+  A - matrix being preallocated
.  ncoo - number of entries
.  coo_i - row indices (local numbering; may be modified)
-  coo_j - column indices (local numbering; may be modified)

   Level: beginner

   Notes:
   The local indices are translated using the local to global mapping, thus `MatSetLocalToGlobalMapping()` must have been
   called prior to this function. For matrices created with `DMCreateMatrix()` the local to global mapping is often already provided.

   The indices coo_i and coo_j may be modified within this function. They might be translated to corresponding global
   indices, but the caller should not rely on them having any specific value after this function returns. The arrays
   can be freed or reused immediately after this function returns.

   Entries can be repeated, see `MatSetValuesCOO()`. Entries with negative row or column indices are allowed
   but will be ignored. The corresponding entries in `MatSetValuesCOO()` will be ignored too. Remote entries
   are allowed and will be properly added or inserted to the matrix.

.seealso: `MatSetValuesCOO()`, `MatSeqAIJSetPreallocation()`, `MatMPIAIJSetPreallocation()`, `MatSeqBAIJSetPreallocation()`, `MatMPIBAIJSetPreallocation()`, `MatSeqSBAIJSetPreallocation()`, `MatMPISBAIJSetPreallocation()`, `MatSetPreallocationCOO()`, `DMSetMatrixPreallocateSkip()`
@*/
PetscErrorCode MatSetPreallocationCOOLocal(Mat A, PetscCount ncoo, PetscInt coo_i[], PetscInt coo_j[]) {
  PetscErrorCode (*f)(Mat, PetscCount, PetscInt[], PetscInt[]) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  if (ncoo) PetscValidIntPointer(coo_i, 3);
  if (ncoo) PetscValidIntPointer(coo_j, 4);
  PetscCheck(ncoo <= PETSC_MAX_INT, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "ncoo %" PetscCount_FMT " overflowed PetscInt; configure --with-64-bit-indices or request support", ncoo);
  PetscCall(PetscLayoutSetUp(A->rmap));
  PetscCall(PetscLayoutSetUp(A->cmap));

  PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetPreallocationCOOLocal_C", &f));
  if (f) {
    PetscCall((*f)(A, ncoo, coo_i, coo_j));
    A->nonzerostate++;
  } else {
    /* no native local implementation: translate indices in place and defer to the global version */
    ISLocalToGlobalMapping ltog_row, ltog_col;
    PetscCall(MatGetLocalToGlobalMapping(A, &ltog_row, &ltog_col));
    if (ltog_row) PetscCall(ISLocalToGlobalMappingApply(ltog_row, ncoo, coo_i, coo_i));
    if (ltog_col) PetscCall(ISLocalToGlobalMappingApply(ltog_col, ncoo, coo_j, coo_j));
    PetscCall(MatSetPreallocationCOO(A, ncoo, coo_i, coo_j));
  }
  A->preallocated = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
   MatSetValuesCOO - set values at once in a matrix preallocated using `MatSetPreallocationCOO()`

   Collective on A

   Input Parameters:
+  A - matrix being preallocated
.  coo_v - the matrix values (can be NULL)
-  imode - the insert mode

   Level: beginner

   Notes:
   The values must follow the order of the indices prescribed with `MatSetPreallocationCOO()` or `MatSetPreallocationCOOLocal()`.

   When repeated entries are specified in the COO indices the coo_v values are first properly summed, regardless of the value of imode.
   The imode flag indicates if coo_v must be added to the current values of the matrix (`ADD_VALUES`) or overwritten (`INSERT_VALUES`).

   `MatAssemblyBegin()` and `MatAssemblyEnd()` do not need to be called after this routine. It automatically handles the assembly process.

.seealso: `MatSetPreallocationCOO()`, `MatSetPreallocationCOOLocal()`, `InsertMode`, `INSERT_VALUES`, `ADD_VALUES`
@*/
PetscErrorCode MatSetValuesCOO(Mat A, const PetscScalar coo_v[], InsertMode imode) {
  PetscErrorCode (*f)(Mat, const PetscScalar[], InsertMode) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidType(A, 1);
  MatCheckPreallocated(A, 1);
  PetscValidLogicalCollectiveEnum(A, imode, 3);
  PetscCall(PetscObjectQueryFunction((PetscObject)A, "MatSetValuesCOO_C", &f));
  PetscCall(PetscLogEventBegin(MAT_SetVCOO, A, 0, 0, 0));
  if (f) {
    PetscCall((*f)(A, coo_v, imode));
  } else { /* allow fallback */
    PetscCall(MatSetValuesCOO_Basic(A, coo_v, imode));
  }
  PetscCall(PetscLogEventEnd(MAT_SetVCOO, A, 0, 0, 0));
  PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
  PetscFunctionReturn(0);
}

/*@
   MatSetBindingPropagates - Sets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

   Input Parameters:
+  A - the matrix
-  flg - flag indicating whether the boundtocpu flag should be propagated

   Level: developer

   Notes:
   If the value of flg is set to true, the following will occur:

   `MatCreateSubMatrices()` and `MatCreateRedundantMatrix()` will bind created matrices to CPU if the input matrix is bound to the CPU.

   `MatCreateVecs()` will bind created vectors to CPU if the input matrix is bound to the CPU.
   The bindingpropagates flag itself is also propagated by the above routines.

   Developer Note:
   If the fine-scale `DMDA` has the -dm_bind_below option set to true, then `DMCreateInterpolationScale()` calls `MatSetBindingPropagates()`
   on the restriction/interpolation operator to set the bindingpropagates flag to true.

.seealso: `VecSetBindingPropagates()`, `MatGetBindingPropagates()`
@*/
PetscErrorCode MatSetBindingPropagates(Mat A, PetscBool flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
  A->bindingpropagates = flg;
#endif
  PetscFunctionReturn(0);
}

/*@
   MatGetBindingPropagates - Gets whether the state of being bound to the CPU for a GPU matrix type propagates to child and some other associated objects

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  flg - flag indicating whether the boundtocpu flag will be propagated

   Level: developer

.seealso: `MatSetBindingPropagates()`
@*/
PetscErrorCode MatGetBindingPropagates(Mat A, PetscBool *flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A, MAT_CLASSID, 1);
  PetscValidBoolPointer(flg, 2);
#if defined(PETSC_HAVE_VIENNACL) || defined(PETSC_HAVE_CUDA)
  *flg = A->bindingpropagates;
#else
  /* without GPU support there is nothing to propagate */
  *flg = PETSC_FALSE;
#endif
  PetscFunctionReturn(0);
}